You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@asterixdb.apache.org by im...@apache.org on 2016/04/07 16:59:36 UTC

[01/50] [abbrv] incubator-asterixdb git commit: Move TLP pom back up

Repository: incubator-asterixdb
Updated Branches:
  refs/heads/master 877407a32 -> d3d24af45
Updated Tags:  refs/tags/apache-asterixdb-hyracks-0.2.17-incubating [created] b02eb0ac4
  refs/tags/apache-asterixdb-hyracks-0.2.17-incubating-rc0 [created] 9a9d08768
  refs/tags/apache-asterixdb-hyracks-0.2.17-incubating-rc1 [created] 9891ec54f
  refs/tags/fullstack-0.2.10 [created] b3f432244
  refs/tags/fullstack-0.2.11 [created] a23536c3b
  refs/tags/fullstack-0.2.12 [created] 3f91a13f9
  refs/tags/fullstack-0.2.13 [created] e8fb4f4e2
  refs/tags/fullstack-0.2.14 [created] 2b164aab2
  refs/tags/fullstack-0.2.15 [created] 2636e1047
  refs/tags/fullstack-0.2.16-incubating [created] e8a7eb25d
  refs/tags/fullstack-0.2.5 [created] 731f9dc7d
  refs/tags/fullstack-0.2.6 [created] 108e7c0b4
  refs/tags/fullstack-0.2.7 [created] 4f883c0db
  refs/tags/fullstack-0.2.8 [created] 69e827a1a
  refs/tags/fullstack-0.2.9 [created] fc0a83419
  refs/tags/vault/aggregators_dev_next [created] 9cdaa9c32
  refs/tags/vault/fullstack_asterix_stabilization [created] 05ab87d32
  refs/tags/vault/fullstack_genomix@2593 [created] 9ba38a283
  refs/tags/vault/fullstack_hyracks_ioc [created] 79359baf8
  refs/tags/vault/fullstack_imru@2585 [created] 1e0626372
  refs/tags/vault/fullstack_lsm_staging [created] e78b6406b
  refs/tags/vault/fullstack_lsm_staging@3347 [created] e2554749d
  refs/tags/vault/fullstack_lsm_staging_issue_305 [created] c064c6840
  refs/tags/vault/fullstack_lsm_staging_issue_97 [created] 1ab11c52a
  refs/tags/vault/fullstack_pregelix_fix [created] 2f177a89c
  refs/tags/vault/fullstack_staging [created] 596a440ba
  refs/tags/vault/fullstack_staging@1956 [created] 8cc07473a
  refs/tags/vault/fullstack_staging@2391 [created] 734fad9f1
  refs/tags/vault/fullstack_staging_bigmerge_target [created] d4ee025d6
  refs/tags/vault/hyracks-next [created] dcaf12c08
  refs/tags/vault/hyracks_admin_console@327 [created] 6861b7fe9
  refs/tags/vault/hyracks_algebricks_integration [created] d7e2c7500
  refs/tags/vault/hyracks_aqua_changes [created] 33228b5a1
  refs/tags/vault/hyracks_aqua_changes@403 [created] e8e504b4c
  refs/tags/vault/hyracks_btree_updates_next [created] 51a7c2262
  refs/tags/vault/hyracks_create_documentation [created] 45a8b29a4
  refs/tags/vault/hyracks_create_documentation@280 [created] 16dfe9ff4
  refs/tags/vault/hyracks_dev_next@501 [created] ae034530b
  refs/tags/vault/hyracks_dev_next@539 [created] bf2a1257e
  refs/tags/vault/hyracks_hadoop_compat_changes [created] 6e127bbe4
  refs/tags/vault/hyracks_hadoop_compat_changes@458 [created] 7f6203730
  refs/tags/vault/hyracks_indexes [created] 38990af91
  refs/tags/vault/hyracks_inverted_index_updates_new [created] deb4e5daf
  refs/tags/vault/hyracks_io_management@287 [created] e6c4a9558
  refs/tags/vault/hyracks_isolation [created] 73e3adcc1
  refs/tags/vault/hyracks_lsm_experiments [created] 36af393d6
  refs/tags/vault/hyracks_lsm_length_filter [created] 7a49cd913
  refs/tags/vault/hyracks_lsm_tree [created] d2119f2c3
  refs/tags/vault/hyracks_lsm_tree_bloom_filter [created] ab4c8f991
  refs/tags/vault/hyracks_multicomparator_opt [created] 4247dce55
  refs/tags/vault/hyracks_online_aggregation@185 [created] b123d0bc7
  refs/tags/vault/hyracks_scheduling@296 [created] fed8d8597
  refs/tags/vault/hyracks_scheduling@328 [created] acbed0359
  refs/tags/vault/hyracks_sort_join_opts [created] 110a9b9e3
  refs/tags/vault/hyracks_spilling_groupby [created] 29c9a558c
  refs/tags/vault/hyracks_spilling_groupby@299 [created] 0c1ba09ba
  refs/tags/vault/hyracks_spilling_groupby_perf@326 [created] fd19638e5
  refs/tags/vault/hyracks_storage_cleanup [created] 7edf4e8f9
  refs/tags/vault/hyracks_storage_cleanup@236 [created] 4c1c47609
  refs/tags/vault/hyracks_v0.2@173 [created] 62379f090
  refs/tags/vault/hyrax-next [created] 595d5d600
  refs/tags/vault/hyrax-next@10 [created] b5f74e385


Move TLP pom back up


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/bc0607df
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/bc0607df
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/bc0607df

Branch: refs/heads/master
Commit: bc0607df4f8d4469fadec09c3361533565ec5ad8
Parents: 6de6915
Author: Ian Maxon <im...@apache.org>
Authored: Wed Mar 30 17:33:09 2016 -0700
Committer: Ian Maxon <im...@apache.org>
Committed: Wed Mar 30 17:33:09 2016 -0700

----------------------------------------------------------------------
 pom.xml     | 40 ++++++++++++++++++++++++++++++++++++++++
 tlp/pom.xml | 40 ----------------------------------------
 2 files changed, 40 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/bc0607df/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..b3ff069
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,40 @@
+<!--
+ ! Licensed to the Apache Software Foundation (ASF) under one
+ ! or more contributor license agreements.  See the NOTICE file
+ ! distributed with this work for additional information
+ ! regarding copyright ownership.  The ASF licenses this file
+ ! to you under the Apache License, Version 2.0 (the
+ ! "License"); you may not use this file except in compliance
+ ! with the License.  You may obtain a copy of the License at
+ !
+ !   http://www.apache.org/licenses/LICENSE-2.0
+ !
+ ! Unless required by applicable law or agreed to in writing,
+ ! software distributed under the License is distributed on an
+ ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ ! KIND, either express or implied.  See the License for the
+ ! specific language governing permissions and limitations
+ ! under the License.
+ !-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache</groupId>
+  <artifactId>experimental</artifactId>
+  <version>0.0.1</version>
+  <packaging>pom</packaging>
+  <name>hyracks-asterix</name>
+
+  <licenses>
+    <license>
+      <name>Apache License, Version 2.0</name>
+      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+      <distribution>repo</distribution>
+      <comments>A business-friendly OSS license</comments>
+    </license>
+  </licenses>
+
+  <modules>
+    <module>hyracks</module>
+    <module>asterixdb</module>
+  </modules>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/bc0607df/tlp/pom.xml
----------------------------------------------------------------------
diff --git a/tlp/pom.xml b/tlp/pom.xml
deleted file mode 100644
index b3ff069..0000000
--- a/tlp/pom.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<!--
- ! Licensed to the Apache Software Foundation (ASF) under one
- ! or more contributor license agreements.  See the NOTICE file
- ! distributed with this work for additional information
- ! regarding copyright ownership.  The ASF licenses this file
- ! to you under the Apache License, Version 2.0 (the
- ! "License"); you may not use this file except in compliance
- ! with the License.  You may obtain a copy of the License at
- !
- !   http://www.apache.org/licenses/LICENSE-2.0
- !
- ! Unless required by applicable law or agreed to in writing,
- ! software distributed under the License is distributed on an
- ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- ! KIND, either express or implied.  See the License for the
- ! specific language governing permissions and limitations
- ! under the License.
- !-->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache</groupId>
-  <artifactId>experimental</artifactId>
-  <version>0.0.1</version>
-  <packaging>pom</packaging>
-  <name>hyracks-asterix</name>
-
-  <licenses>
-    <license>
-      <name>Apache License, Version 2.0</name>
-      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-      <distribution>repo</distribution>
-      <comments>A business-friendly OSS license</comments>
-    </license>
-  </licenses>
-
-  <modules>
-    <module>hyracks</module>
-    <module>asterixdb</module>
-  </modules>
-</project>


[40/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm
new file mode 100644
index 0000000..f54dee9
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm
@@ -0,0 +1,100 @@
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID
 : 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatchTime": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "Request
 Memory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceNa
 me)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-es
 r1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "ssericksen@chtc.wisc.edu", "StreamOut
 ": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkp
 ts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e27
 2.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, 
 "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts
 _RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "Nu
 mJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e286.chtc.wisc
 .edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLea
 seDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpt
 s_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "
 NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e457.chtc.wi
 sc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLe
 aseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "M
 inHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_Job
 Starts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Total
 TimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMon
 itorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_To
 talTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Total
 TimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites":
  0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "DAGManNodesM
 ask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSusp
 ensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Environment": "", "LastPublicClaim
 Id": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e44
 5.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250
 000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e37
 2.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250
 000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", 
 "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_J
 obStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tot
 alTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfM
 onitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_
 TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tot
 alTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites
 ": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.log", "DAGMa
 nNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "Comm
 ittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -- 3", "Environment": "", "LastP
 ublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_
 RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 2
 6620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@c0
 64.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1
 250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e42
 3.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250
 000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes":
  0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e320
 .chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 125000
 0, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0,
  "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e442.ch
 tc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000,
  "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582124.0#1446105525", "StatsLifetimeStarter": 24745, "JobStartDate": 1446107685, "SubmitEventNotes": "DAG Node: 323+323", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 13, "StartdPrincipal": "execute-side@matchsession/128.104.55.89", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107685, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 75000, "RemoteWallClockTime": 24748.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132433, "ResidentSetSize_RAW": 71248, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "Min
 Hosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 21145.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 118000, "BytesSent": 30560.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobSt
 arts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTi
 meUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonit
 orAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_Tota
 lTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTi
 meClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0
 , "JobFinishedHookDone": 1446132434, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 314, "SpooledOutputFiles": "harvest.log,CURLTIME_3853266,ChtcWrapper323.out,AuditLog.323,simu_3_323.txt,323.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107686, "ExitBySignal": false, "LastMatchTime": 1446107685, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 1142, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 43788, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24748, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/323/process.log", "D
 AGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24748.0d, "LastJobLeaseRenewal": 1446132433, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "323+323", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@c070.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 175.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/323/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, 
 "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132433, "StreamErr": false, "RecentBlockReadKbytes": 4224, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582124, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24748.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=323 -- 3", "Environment": ""
 , "LastPublicClaimId": "<128.104.55.89:32652>#1445371750#1302#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/323", "QDate": 1446105525, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", 
 "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_J
 obStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tot
 alTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfM
 onitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_
 TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tot
 alTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites
 ": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.log", "DAGMa
 nNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "Comm
 ittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -- 3", "Environment": "", "LastP
 ublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133562, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_523030,ChtcWrapper333.out,AuditLog.333,simu_3_333.txt,harvest.log,333.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes":
  0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25876, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/333/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25876.0d, "LastJobLeaseRenewal": 1446133562, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName":

<TRUNCATED>


[41/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql
new file mode 100644
index 0000000..86af80f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql
new file mode 100644
index 0000000..21c8ac6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create an adapter that uses external parser to parse data from files
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+
+drop dataverse externallibtest if exists;
+create dataverse externallibtest;
+use dataverse externallibtest;
+
+create type Classad as open {
+GlobalJobId: string
+};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql
new file mode 100644
index 0000000..0290611
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql
new file mode 100644
index 0000000..5b2d50c
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use dataverse externallibtest;
+
+create external dataset Condor(Classad) using localfs(
+("path"="asterix_nc1://data/external-parser/jobads.old"),
+("format"="line-separated"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql
new file mode 100644
index 0000000..9d5d499
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse externallibtest;
+
+for $x in dataset Condor
+order by $x.GlobalJobId
+return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql
new file mode 100644
index 0000000..86af80f
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql
new file mode 100644
index 0000000..7c668e4
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/**
+ * Testing an external dataset with invalid adapter format parameter value
+ * Expected result: fail - Unknown data format.
+ */
+
+drop dataverse temp if exists;
+create dataverse temp;
+use dataverse temp;
+
+create type test as closed {
+  id: int32
+};
+
+create external dataset testds (test) using localfs(
+("path"="asterix_nc1://data/csv/sample_04_quote_error.csv"),
+("format"="add"));
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql
new file mode 100644
index 0000000..438e0b6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/**
+ * Testing an external dataset with invalid adapter format parameter value
+ * Expected result: fail - Unknown data format.
+ */
+
+use dataverse temp;
+
+for $i in dataset testds
+return $i;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql
new file mode 100644
index 0000000..d7827c5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a twitter feed with missing parameters
+ * Expected Res : Failure
+ */
+
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TwitterUser if not exists as open{
+screen_name: string,
+language: string,
+friends_count: int32,
+status_count: int32,
+name: string,
+followers_count: int32
+};
+
+create type Tweet if not exists as open{
+id: string,
+user: TwitterUser,
+latitude:double,
+longitude:double,
+created_at:string,
+message_text:string
+};
+
+create dataset Tweets (Tweet)
+primary key id;
+
+create feed TwitterFeed using push_twitter(
+("type-name"="Tweet"),
+("format"="twitter-status"),
+("consumer.key"="************"),
+("access.token"="**********"),
+("access.token.secret"="*************"));
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql
new file mode 100644
index 0000000..6712969
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a twitter feed with missing parameters
+ * Expected Res : Failure
+ */
+
+use dataverse feeds;
+connect feed TwitterFeed to dataset Tweets;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql
new file mode 100644
index 0000000..37a8f14
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/**
+ * Test loading from a file that does not exist.
+ * Expected result: fail - File not found.
+ */
+
+drop dataverse broken if exists;
+create dataverse broken;
+use dataverse broken;
+
+create type xtype as closed { id: int32 };
+create dataset X(xtype) primary key id;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql
new file mode 100644
index 0000000..c26ffd5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/**
+ * Test loading from a file that does not exist.
+ * Expected result: fail - File not found.
+ */
+
+use dataverse broken;
+
+load dataset X using localfs(
+  ("path"="asterix_nc1://bla"),
+  ("format"="delimited-text"),
+  ("delimiter"="|")
+);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql
new file mode 100644
index 0000000..671e5a2
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Test case Name  : temp_primary_plus_ngram_flush.aql
+ * Description     : Check that flush for temporary datasets with ngram indexes succeeds.
+ * Expected Result : Success
+ * Date            : Apr 4 2016
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+row_id: int32,
+sid: int32,
+date: string,
+day: int32,
+time: string,
+bpm: int32,
+RR: float,
+text: string,
+location: point,
+text2: string
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+row_id: int32,
+sid: int32,
+date: date,
+day: int32,
+time: time,
+bpm: int32,
+RR: float,
+text: string,
+location: point,
+text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create temporary dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create temporary dataset Fragile (FragileType)
+primary key row_id;
+
+/* Create default secondary index on dataset clean Fragile */
+create index cfText2Ix on Fragile(text2) type ngram(3);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql
new file mode 100644
index 0000000..1b3cbd3
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Test case Name  : temp_primary_plus_ngram_flush.aql
+ * Description     : Check that flush for temporary datasets with ngram indexes succeeds.
+ * Expected Result : Success
+ * Date            : Apr 4 2016
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using localfs
+(("path"="asterix_nc1://data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+for $t in dataset Fragile_raw
+where $t.row_id <= 1000
+return {
+"row_id": $t.row_id,
+"sid": $t.sid,
+"date": date($t.date),
+"day": $t.day,
+"time": parse-time($t.time, "h:m:s"),
+"bpm": $t.bpm,
+"RR": $t.RR,
+"text": $t.text,
+"location": $t.location,
+"text2": $t.text2}
+);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql
new file mode 100644
index 0000000..67c41d6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Test case Name  : temp_primary_plus_ngram_flush.aql
+ * Description     : Check that flush for temporary datasets with ngram indexes succeeds.
+ * Expected Result : Success
+ * Date            : Apr 4 2016
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile
+where contains($x.text2, "location") return $x);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp
new file mode 100644
index 0000000..6664c91
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+SELECT count(u."friend-ids") count
+FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp
new file mode 100644
index 0000000..1204809
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+SELECT 1 foo, COUNT(u."friend-ids") count
+FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp
new file mode 100644
index 0000000..c0bf442
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+SELECT COUNT(1) count
+FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp
new file mode 100644
index 0000000..4d1b4d5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+SELECT COUNT([1,2,3]) count
+FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp
new file mode 100644
index 0000000..7922734
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+SELECT u.name name, COUNT(u."friend-ids") count
+FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp
new file mode 100644
index 0000000..907afb0
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+SELECT COLL_COUNT(u.name) count
+FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp
new file mode 100644
index 0000000..67b3d68
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+COUNT(
+  ( SELECT u.name count
+    FROM FacebookUsers u
+  )
+);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp
new file mode 100644
index 0000000..a7c021b
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+drop  database TinySocial if exists;
+create  database TinySocial;
+
+use TinySocial;
+
+
+create type TinySocial.TwitterUserType as
+{
+  "screen-name" : string
+}
+
+create type TinySocial.TweetMessageType as {
+  tweetid : string
+}
+
+create type TinySocial.FacebookUserType as
+ open {
+  id : int64
+}
+
+create type TinySocial.FacebookMessageType as
+ open {
+  "message-id" : int64
+}
+
+create  table FacebookUsers(FacebookUserType) primary key id;
+create  table FacebookMessages(FacebookMessageType) primary key "message-id";
+create  table TwitterUsers(TwitterUserType) primary key "screen-name";
+create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
+create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
+create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp
new file mode 100644
index 0000000..4b757cd
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use TinySocial;
+
+
+load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
+
+load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
+
+load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
+
+load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp
new file mode 100644
index 0000000..9ff9b65
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+USE TinySocial;
+
+SELECT COLL_COUNT(u."friend-ids") count
+FROM FacebookUsers u
+ORDER BY u.id;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp
new file mode 100644
index 0000000..1da2a8a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use tpch;
+
+
+select count(l) as count
+from  LineItem as l
+where l.l_shipdate >= '1994-01-01' and l.l_shipdate < '1995-01-01' and l.l_discount >= 0.05
+  and l.l_discount <= 0.07 and l.l_quantity < 24
+;


[39/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm
new file mode 100644
index 0000000..56abc61
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm
@@ -0,0 +1,5 @@
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1206367.0#1439615565", "CRAB_UserGroup": "undefined", "JobStartDate": 1439615574, "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ferencek/CN=650164/CN=Dinko Ferencek", "JobStatus": 4, "CRAB_TFileOutputFiles": "{  }", "LeaveJobInQueue": "JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )", "DAG_InRecovery": 0, "AutoClusterId": 10378, "CRAB_TaskWorker": "vocms052", "OnExitRemove": "( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )", "MaxWallTimeMins_RAW": 1315, "JobCurrentStartDate": 1439615574, "CRAB_ASOTimeout": 86400, "CoreSize": -1, "CRAB_AsyncDest": "T3_US_FNALLPC", "StageInFinish": 1439615572, "ExitStatus": 0, "ReleaseReason": "Data files spooled", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "x509userproxyexpiration": 1440294044, "CurrentTime": "time()", "X509UserProxy": "3a7798796bc24a800001338
 917ec45991bcf0a96", "WantCheckpoint": false, "RemoteWallClockTime": 158333.0d, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "DiskUsage_RAW": 1, "DAG_Status": 0, "SUBMIT_x509userproxy": "/data/certs/creds/3a7798796bc24a800001338917ec45991bcf0a96", "EnteredCurrentStatus": 1439773907, "CRAB_RestURInoAPI": "/crabserver/prod", "HoldKillSig": "SIGUSR1", "RequestDisk": "DiskUsage", "MyType": "Job", "PeriodicRemove": "( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )", "RemoveKillSig": "SIGUSR1", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "dag_bootstrap_startup.sh", "CondorVersion": "$CondorVersion: 8.3.1 Jun 19 2015 $", "DAG_NodesReady": 0, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "CRAB_Workflow": "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "CRAB_UserRole": "undefined", "RemoteUserCpu
 ": 0.0d, "NiceUser": false, "CRAB_AlgoArgs": "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"lheInputFiles\": true, \"splitOnRun\": false, \"events_per_job\": 50000}, \"halt_job_on_file_boundaries\": false}", "Out": "_condor_stdout", "ImageSize_RAW": 100, "DAG_NodesPostrun": 0, "CRAB_JobArch": "slc6_amd64_gcc481", "CumulativeSuspensionTime": 0, "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_Requ
 estCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage", "LastHoldReasonCode": 16, "NumCkpts": 0, "CRAB_BlacklistT1": 0, "Err": "_condor_stderr", "JobFinishedHookDone": 1439773907, "RequestMemory_RAW": 2000, "TransferOutputRemaps": "undefined", "ProcId": 0, "ImageSize": 100, "JobUniverse": 7, "DAG_NodesTotal": 30, "CRAB_JobType": "analysis", "SUBMIT_Iwd": "/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB", "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "OnExitHold": "( ExitCode =!= undefined && ExitCode != 0 )", "OrigMaxHosts": 1, "RequestMemory": 2000, "NumJobStarts": 1, "CRAB_UserHN": "ferencek", "LastHoldReason": "Spooling input data files", "TotalSuspensions": 0, "CRAB_FailedNodeLimit": -1, "ExitCode": 0, "CRAB_PublishName": "LHE-17521057f93ed9cadf21dd45b3505145", "CRAB_UserWebDir": "http://sub
 mit-5.t2.ucsd.edu/CSstoragePath/67/uscms5050/150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE", "JobNotification": 0, "CRAB_DashboardTaskType": "analysis", "SUBMIT_TransferOutputRemaps": "_condor_stdout=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.out;_condor_stderr=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.err", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "LocalUserCpu": 0.0d, "BufferBlockSize": 32768, "LastJobStatus": 2, "CommittedTime": 0, "CRAB_SaveLogsFlag": 0, "LastSuspensionTime": 0, "TaskType": "ROOT", "DAG_NodesDone": 30, "CumulativeSlotTime": 158333.0d, "TransferOutput": "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001", "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "OtherJobRemoveRequirements": "DAGManJobId =?= ClusterId", "CondorPlatform": "$C
 ondorPlatform: X86_64-ScientificLinux_6.6 $", "PeriodicRelease": false, "JobRunCount": 1, "CRAB_Publish": 1, "JobPrio": 10, "CRAB_TransferOutputs": 1, "CRAB_Attempt": 0, "LocalSysCpu": 0.0d, "RemoteSysCpu": 0.0d, "TransferInput": "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz", "PeriodicHold": false, "CRAB_NumAutomJobRetries": 2, "CRAB_LumiMask": "{}", "CRAB_InputData": "/Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8", "WantRemoteIO": true, "CommittedSuspensionTime": 0, "CRAB_JobSW": "CMSSW_7_1_18", "StageInStart": 1439615569, "CRAB_SiteWhitelist": "{ \"T3_US_FNALLPC\",\"T2_US_Purdue\",\"T2_US_Nebraska\" }", "CompletionDate": 1439773907, "StreamErr": false, "CRAB_RestHost": "cmsweb.cern.ch", "RemoteCondorSetup": "", "CRAB_ReqName": "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13T
 eV-madgraph-pythia8_LHE", "DAG_NodesPrerun": 0, "WantRemoteSyscalls": false, "DAG_NodesQueued": 0, "DAG_NodesUnready": 0, "Owner": "uscms5050", "Requirements": "true || false && TARGET.OPSYS == \"LINUX\" && TARGET.ARCH == \"X86_64\" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory", "CRAB_JobCount": 30, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "CRAB_SplitAlgo": "EventBased", "DiskUsage": 1, "CRAB_MaxPost": 20, "ClusterId": 1206367, "BufferSize": 524288, "DAG_NodesFailed": 0, "MaxWallTimeMins": 1400, "CRAB_PublishGroupName": 0, "CommittedSlotTime": 0, "CRAB_SiteBlacklist": "{  }", "Args": "RunJobs.dag", "CRAB_EDMOutputFiles": "{ \"Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE.root\" }", "Environment": "strcat(\"PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=\",ClusterId,\".\",ProcId,\" CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local\")", "CRAB_UserVO": "cms", "Iwd": "/data/condor_local/spool/6367/0/cl
 uster1206367.proc0.subproc0", "QDate": 1439615565, "CurrentHosts": 0, "User": "uscms5050@cms", "StreamOut": false }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1217455.0#1439764883", "CRAB_UserGroup": "dcms", "JobStartDate": 1439764892, "CRAB_UserDN": "/C=DE/O=GermanGrid/OU=KIT/CN=Andrew Gilbert", "JobStatus": 4, "CRAB_TFileOutputFiles": "{  }", "LeaveJobInQueue": "JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )", "DAG_InRecovery": 0, "AutoClusterId": 10378, "CRAB_TaskWorker": "vocms052", "OnExitRemove": "( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )", "MaxWallTimeMins_RAW": 1315, "JobCurrentStartDate": 1439764892, "CRAB_ASOTimeout": 86400, "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "StageInFinish": 1439764891, "ExitStatus": 0, "ReleaseReason": "Data files spooled", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "CurrentTime": "time()", "X509UserProxy": "63f0c4d862d8b4e4ddcfd29ed85b6b5899660759", "WantCheckpoint": false, "RemoteWallClockTime": 82427.0d, "In": 
 "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "DiskUsage_RAW": 1, "DAG_Status": 0, "SUBMIT_x509userproxy": "/data/certs/creds/63f0c4d862d8b4e4ddcfd29ed85b6b5899660759", "EnteredCurrentStatus": 1439847319, "CRAB_RestURInoAPI": "/crabserver/prod", "HoldKillSig": "SIGUSR1", "RequestDisk": "DiskUsage", "MyType": "Job", "PeriodicRemove": "( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )", "RemoveKillSig": "SIGUSR1", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "dag_bootstrap_startup.sh", "CondorVersion": "$CondorVersion: 8.3.1 Jun 19 2015 $", "DAG_NodesReady": 0, "CRAB_AdditionalOutputFiles": "{ \"combine_output.tar\" }", "ShouldTransferFiles": "YES", "CRAB_Workflow": "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "CRAB_UserRole": "undefined", "RemoteUserCpu": 0.0d, "NiceUser": false, "CRAB_AlgoArgs": "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_o
 n_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"splitOnRun\": false, \"events_per_job\": 1}, \"halt_job_on_file_boundaries\": false}", "Out": "_condor_stdout", "ImageSize_RAW": 100, "DAG_NodesPostrun": 0, "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,N
 iceUser,Rank,Requirements,DiskUsage", "LastHoldReasonCode": 16, "NumCkpts": 0, "CRAB_BlacklistT1": 0, "Err": "_condor_stderr", "JobFinishedHookDone": 1439847319, "RequestMemory_RAW": 2000, "TransferOutputRemaps": "undefined", "ProcId": 0, "ImageSize": 100, "JobUniverse": 7, "DAG_NodesTotal": 25, "CRAB_JobType": "analysis", "SUBMIT_Iwd": "/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9", "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "OnExitHold": "( ExitCode =!= undefined && ExitCode != 0 )", "OrigMaxHosts": 1, "RequestMemory": 2000, "NumJobStarts": 1, "CRAB_UserHN": "agilbert", "LastHoldReason": "Spooling input data files", "TotalSuspensions": 0, "CRAB_FailedNodeLimit": -1, "ExitCode": 0, "CRAB_PublishName": "prefit_cms_asimov_A1_5D-59ffde2b5d41be5f0c401d0a6a8a0194", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/73/uscms5616/150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "JobNotification": 0, "CRAB_DashboardTaskType": "anal
 ysis", "SUBMIT_TransferOutputRemaps": "_condor_stdout=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.out;_condor_stderr=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.err", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "LocalUserCpu": 0.0d, "BufferBlockSize": 32768, "LastJobStatus": 2, "CommittedTime": 0, "CRAB_SaveLogsFlag": 0, "LastSuspensionTime": 0, "TaskType": "ROOT", "DAG_NodesDone": 25, "CumulativeSlotTime": 82427.0d, "TransferOutput": "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001", "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "OtherJobRemoveRequirements": "DAGManJobId =?= ClusterId", "CondorPlatform": "$CondorPlatform: X86_64-ScientificLinux_6.6 $", "PeriodicRelease": false, "JobRunCount": 1, "CRAB_Publish": 0, "JobPrio": 10, "CRAB_TransferOutputs": 1, "CRAB_Attempt": 0, "LocalSysCpu": 0.0d, "RemoteSysCpu": 0.0d, "TransferInput": "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh,
  cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz", "PeriodicHold": false, "CRAB_NumAutomJobRetries": 2, "CRAB_LumiMask": "{}", "CRAB_InputData": "/MinBias", "WantRemoteIO": true, "CommittedSuspensionTime": 0, "CRAB_JobSW": "CMSSW_7_4_0_pre9", "StageInStart": 1439764886, "CRAB_SiteWhitelist": "{  }", "CompletionDate": 1439847319, "StreamErr": false, "CRAB_RestHost": "cmsweb.cern.ch", "RemoteCondorSetup": "", "CRAB_ReqName": "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "DAG_NodesPrerun": 0, "WantRemoteSyscalls": false, "DAG_NodesQueued": 0, "DAG_NodesUnready": 0, "Owner": "uscms5616", "Requirements": "true || false && TARGET.OPSYS == \"LINUX\" && TARGET.ARCH == \"X86_64\" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory", "CRAB_JobCount": 25, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "CRAB_SplitAlgo": "
 EventBased", "DiskUsage": 1, "CRAB_MaxPost": 20, "ClusterId": 1217455, "BufferSize": 524288, "DAG_NodesFailed": 0, "MaxWallTimeMins": 1400, "CRAB_PublishGroupName": 0, "CommittedSlotTime": 0, "CRAB_SiteBlacklist": "{ \"T2_FR_CCIN2P3\",\"T1_IT_CNAF\",\"T1_ES_PIC\",\"T1_UK_RAL\",\"T2_FI_HIP\",\"T2_US_Nebraska\" }", "Args": "RunJobs.dag", "CRAB_EDMOutputFiles": "{  }", "Environment": "strcat(\"PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=\",ClusterId,\".\",ProcId,\" CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local\")", "CRAB_UserVO": "cms", "Iwd": "/data/condor_local/spool/7455/0/cluster1217455.proc0.subproc0", "QDate": 1439764883, "CurrentHosts": 0, "User": "uscms5616@cms", "StreamOut": false }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1233705.0#1439964847", "PostJobPrio1": -1439209593, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.main", "PostJobPrio2": 2, "JobStartDate": 1439965560, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2800", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_KR_KNU", "MATCH_EXP_JOB_Site": "CERN", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 165965.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T1_DE_KIT,T2_UK_London_IC,T2_CH_CERN", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_CH_CERN", "RootDir": "/", "JOB_GLIDEIN_ToDie": "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/user/kbutanov.0
 3af76ad04ddc195ee96e6a5469f1bbb1777390d/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440131525, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_CH_CERN", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "LSF", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 59069, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "ce302.cern.ch:8443/cream-lsf-grid_cms", "CRAB_Workflow": "150810_122536:kbutanov_crab_25ns_
 WJetsToLNu_HT600_800", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 163084.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins3@cmsgwms-factory.fnal.gov", "BytesSent": 119952.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins3@cmsgwms-factory.fnal.gov", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements", "MATCH_GLIDEIN_SiteWMS_Queue": "grid_cms", "NumCkpts": 0, "JobFinishedH
 ookDone": 1440131525, "ImageSize": 4250000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440530096, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1439965560, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_CH_CERN", "MATCH_EXP_Used_Gatekeeper": "ce302.cern.ch:8443/cream-lsf-grid_cms", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_CH_CERN_ce302", "TerminationPending": true, "CRAB_UserHN": "kbutanov", "BlockReads": 0, "DAGManJobId": 1035690, "MATCH_GLIDEIN_SEs": "srm-eoscms.cern.ch", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509UserProxyExpiration,\"Removed job due to proxy expiration\",\"Removed due to job being held\")
 )))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_CH_CERN_ce302", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440530096", "LastJobLeaseRenewal": 1440131524, "AcctGroupUser": "uscms5111", "MATCH_EXP_JOB_GLIDEIN_Factory": "gfactory_service", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.53", "x509UserProxyEmail": "khakimjan.butanov@cern.ch", "CRAB_localOutputFiles": "stepB_MC.root=stepB_MC_53.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "gfactory_service", "accounting_group": "analysis", "DAGNodeName": "Job53", "PeriodicRelease": "( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode
  == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "59069", "MATCH_GLIDEIN_MaxMemMBs": 2800, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM#85bfee36-3b82-11e5-be34-001e67abf518", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESIRED_OpSyses": "LINUX", "DAGManNodesLog": "/data/condor_local/spool/5690/0/clust
 er1035690.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms5111", "PreJobPrio1": 1, "DiskUsage": 75000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 2800, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_CH_CERN", "Iwd": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 165949, "SubmitEventNotes": "DAG Node: Job53", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov", "CRAB_TFileOutputFil
 es": "{ \"stepB_MC.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16275, "StartdPrincipal": "execute-side@matchsession/128.142.45.103", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2800, "MATCH_GLIDEIN_SiteWMS_Slot": "Unknown", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 2800, "JobCurrentStartDate": 1439965560, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/log/cmsRun_53.log.tar.gz, srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/La
 tinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/stepB_MC_53.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440397268, "x509userproxy": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/8123da6528ec4abd24562a99b4f2b0ec556bed0b", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "LSF", "NumRestarts": 0, "DiskUsage_RAW": 61434, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "689255460", "ResidentSetSize_RAW": 1238992, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "/data/condor_local/spool/5690/0/cluster1035690.proc0
 .subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.53", "ImageSize_RAW": 4095188, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2800", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "1", "CRAB_BlacklistT1": 0, "Err": "job_err.53", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 2, "NumJobReconnects": 2, "SpooledOutputFiles": "jobReport.json.53", "MATCH_GLIDEIN_Site": "CERN", "BlockWriteKbytes": 0, "SpoolOnEvict": false, "WhenToTransferOutput":
  "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.main", "JobCurrentStartExecutingDate": 1439965573, "MATCH_GLIDEIN_ProcId": 1, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_25ns_WJetsToLNu_HT600_800-9da7f68dc2032d8626d7e7822bb10506", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/68/uscms5111/150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800", "BlockReadKbytes": 0, "AccountingGroup": "analysis.kbutanov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440564896", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "CERN", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 165965, "CRAB_Retry": 2, "LastSuspensionTime": 0, "MATCH_EXP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "Cum
 ulativeSlotTime": 165965.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 2128005.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "Unknown", "JobRunCount": 1, "LastRemoteHost": "glidein_9757_931570227@b635ef6906.cern.ch", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "689255460", "RemoteSysCpu": 1963.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7", "DAGParentNodeNames": "", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)", "CompletionDate": 1440131525, "CRAB_RestHost": "cmsweb.cern.ch", "MATCH_EXP_
 JOB_GLIDEIN_SiteWMS_Queue": "grid_cms", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1233705, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 165965.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440564896, "LastPublicClaimId": "<128.142.45.103:55332>#1439963327#3#...", "CurrentHosts": 0, "QDate": 1439964847, "Arguments": "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=53 
 --cmsswVersion=CMSSW_7_4_7 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v2/50000/6E2F932B-633B-E511-A7AE-F04DA23BCE4C.root\"] --runAndLumis=job_lumis_53.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm-eoscms.cern.ch", "CRAB_Id": 53, "User": "uscms5111@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1235991.0#1440081300", "PostJobPrio1": -1439550850, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "PostJobPrio2": 3, "JobStartDate": 1440081527, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2500", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "MATCH_EXP_JOB_Site": "Nebraska", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 31976.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_US_Nebraska", "RootDir": "/", "JOB_GLIDEIN_ToDie": 
 "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440113503, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_US_Nebraska", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "HTCondor", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 3043383, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "red-gw1.unl.edu red-gw1.u
 nl.edu:9619", "CRAB_Workflow": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 27257.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins5@gfactory-1.t2.ucsd.edu", "BytesSent": 604821.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins5@gfactory-1.t2.ucsd.edu", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requireme
 nts", "MATCH_GLIDEIN_SiteWMS_Queue": "red-gw1.unl.edu", "NumCkpts": 0, "JobFinishedHookDone": 1440113503, "ImageSize": 2000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440630710, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1440081527, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_US_Nebraska", "MATCH_EXP_Used_Gatekeeper": "red-gw1.unl.edu red-gw1.unl.edu:9619", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_gw1_long", "TerminationPending": true, "CRAB_UserHN": "mrodozov", "BlockReads": 0, "DAGManJobId": 1183604, "MATCH_GLIDEIN_SEs": "srm.unl.edu", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509Use
 rProxyExpiration,\"Removed job due to proxy expiration\",\"Removed due to job being held\"))))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_gw1_long", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440630710", "LastJobLeaseRenewal": 1440113502, "AcctGroupUser": "uscms3850", "MATCH_EXP_JOB_GLIDEIN_Factory": "SDSC", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.3", "x509UserProxyEmail": "mircho.nikolaev.rodozov@cern.ch", "CRAB_localOutputFiles": "results.root=results_3.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "SDSC", "accounting_group": "analysis", "DAGNodeName": "Job3", "PeriodicRele
 ase": "( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "3043383", "MATCH_GLIDEIN_MaxMemMBs": 2500, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESI
 RED_OpSyses": "LINUX", "DAGManNodesLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms3850", "PreJobPrio1": 0, "DiskUsage": 4250000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 1400, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_US_Nebraska", "Iwd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 31968, "SubmitEventNotes": "DAG Node: Job3", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organ
 ic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "CRAB_TFileOutputFiles": "{ \"results.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16278, "StartdPrincipal": "execute-side@matchsession/129.93.183.127", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2500, "MATCH_GLIDEIN_SiteWMS_Slot": "slot1_32@red-d23n7.unl.edu", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 1400, "JobCurrentStartDate": 1440081527, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_3.log.tar.gz, srm://srm-eoscms.cern.ch:
 8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_3.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440171330, "x509userproxy": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "HTCondor", "NumRestarts": 0, "DiskUsage_RAW": 4111436, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "5096573.0", "ResidentSetSize_RAW": 1174388, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.
 ch/crabcache", "Cmd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.3", "ImageSize_RAW": 1756756, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2500", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "14", "CRAB_BlacklistT1": 0, "Err": "job_err.3", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 42, "SpooledOutputFiles": "jobReport.json.3", "MATCH_GLIDEIN_Site": "Nebraska", "BlockWriteKby
 tes": 0, "SpoolOnEvict": false, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "JobCurrentStartExecutingDate": 1440081533, "MATCH_GLIDEIN_ProcId": 14, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "BlockReadKbytes": 0, "AccountingGroup": "analysis.mrodozov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440665510", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "Nebraska", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 31976, "CRAB_Retry": 3,
  "LastSuspensionTime": 0, "MATCH_EXP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "CumulativeSlotTime": 31976.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 4.4879356E7d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "slot1_32@red-d23n7.unl.edu", "JobRunCount": 1, "LastRemoteHost": "glidein_11321_920434792@red-d23n7.unl.edu", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "5096573.0", "RemoteSysCpu": 621.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7_patch2", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)"
 , "CompletionDate": 1440113503, "CRAB_RestHost": "cmsweb.cern.ch", "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue": "red-gw1.unl.edu", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1235991, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 31976.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440665510, "LastPublicClaimId": "<129.93.183.127:56441>#1440063351#7#...", "CurrentHosts": 0, "QDate": 1440081300, "A
 rguments": "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=3 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/06DE7D5F-D4FB-E411-9C85-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E29E093E-54FC-E411-8AE5-0025905A60FE.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/F0FDF730-EDFB-E411-842B-00261834B51D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/FECCF363-F5FB-E411-85A3-002590DBDFE0.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/0E4CEBFE-ECFB-E411-9F0C-842B2B29273C.root\",' '\"/store/mc
 /RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/ECF66DCC-F0FB-E411-84CF-00259074AE32.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/96F29C69-D4FB-E411-9028-842B2B292627.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/6E887F0F-EDFB-E411-875B-BCAEC54B303A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/0C788712-F5FB-E411-AA0E-AC853D9DAC29.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/723A41AE-F4FB-E411-BAA3-0025905C431A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/DA4EA0F5-F4FB-E411-B2AD-00259073E31C.root\",' '\"/store/mc/RunIISpring1
 5DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/98C8F097-F7FB-E411-9A1F-52540006FB8D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/DE4F8235-5FFC-E411-80CD-0025905A6088.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/EA5D6151-F5FB-E411-99F0-0026B92E0C74.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/205D0CF9-F4FB-E411-934D-000F532734AC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/24BCAED9-F0FB-E411-A35B-00259074AE54.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/A4C160C1-F4FB-E411-A66D-B083FED76C6C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_
 300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E86B536C-54FC-E411-8787-AC853D9DACE1.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/2E68E42D-EDFB-E411-8027-001E67397CC9.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/A056D12B-EDFB-E411-9E51-52540006FDD6.root\"] --runAndLumis=job_lumis_3.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm.unl.edu", "CRAB_Id": 3, "User": "uscms3850@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1235992.0#1440081300", "PostJobPrio1": -1439550850, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "PostJobPrio2": 3, "JobStartDate": 1440081782, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2500", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "MATCH_EXP_JOB_Site": "Nebraska", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 33360.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_US_Nebraska", "RootDir": "/", "JOB_GLIDEIN_ToDie": 
 "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440115142, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_US_Nebraska", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "HTCondor", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 2561111, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "red.unl.edu red.unl.edu:9
 619", "CRAB_Workflow": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 28513.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins6@glidein.grid.iu.edu", "BytesSent": 597241.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins6@glidein.grid.iu.edu", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements", "MATCH_G
 LIDEIN_SiteWMS_Queue": "red.unl.edu", "NumCkpts": 0, "JobFinishedHookDone": 1440115142, "ImageSize": 1750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440616411, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1440081782, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_US_Nebraska", "MATCH_EXP_Used_Gatekeeper": "red.unl.edu red.unl.edu:9619", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_long", "TerminationPending": true, "CRAB_UserHN": "mrodozov", "BlockReads": 0, "DAGManJobId": 1183604, "MATCH_GLIDEIN_SEs": "srm.unl.edu", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509UserProxyExpiration,\"Removed job
  due to proxy expiration\",\"Removed due to job being held\"))))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_long", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440616411", "LastJobLeaseRenewal": 1440115142, "AcctGroupUser": "uscms3850", "MATCH_EXP_JOB_GLIDEIN_Factory": "OSGGOC", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.4", "x509UserProxyEmail": "mircho.nikolaev.rodozov@cern.ch", "CRAB_localOutputFiles": "results.root=results_4.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "OSGGOC", "accounting_group": "analysis", "DAGNodeName": "Job4", "PeriodicRelease": "( HoldReasonCode == 28 
 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "2561111", "MATCH_GLIDEIN_MaxMemMBs": 2500, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESIRED_OpSyses": "LINUX", "DAGMan
 NodesLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms3850", "PreJobPrio1": 0, "DiskUsage": 3750000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 1400, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_US_Nebraska", "Iwd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 33352, "SubmitEventNotes": "DAG Node: Job4", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/
 CN=692532/CN=Mircho Nikolaev Rodozov", "CRAB_TFileOutputFiles": "{ \"results.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16278, "StartdPrincipal": "execute-side@matchsession/129.93.182.12", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2500, "MATCH_GLIDEIN_SiteWMS_Slot": "slot1_6@red-d8n12.unl.edu", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 1400, "JobCurrentStartDate": 1440081782, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_4.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/
 store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_4.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440171330, "x509userproxy": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "HTCondor", "NumRestarts": 0, "DiskUsage_RAW": 3661158, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "5092137.0", "ResidentSetSize_RAW": 1148372, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "/data/con
 dor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.4", "ImageSize_RAW": 1727056, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2500", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "8", "CRAB_BlacklistT1": 0, "Err": "job_err.4", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 42, "SpooledOutputFiles": "jobReport.json.4", "MATCH_GLIDEIN_Site": "Nebraska", "BlockWriteKbytes": 0, "SpoolOnEvict": false, "
 WhenToTransferOutput": "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "JobCurrentStartExecutingDate": 1440081789, "MATCH_GLIDEIN_ProcId": 8, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "BlockReadKbytes": 0, "AccountingGroup": "analysis.mrodozov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440651211", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "Nebraska", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 33360, "CRAB_Retry": 3, "LastSuspensionTime": 0, "MATCH_E
 XP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "CumulativeSlotTime": 33360.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 4.4879356E7d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "slot1_6@red-d8n12.unl.edu", "JobRunCount": 1, "LastRemoteHost": "glidein_1936_57194584@red-d8n12.unl.edu", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "5092137.0", "RemoteSysCpu": 616.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7_patch2", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)", "CompletionDate": 1440115142, "CRAB
 _RestHost": "cmsweb.cern.ch", "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue": "red.unl.edu", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1235992, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 33360.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440651211, "LastPublicClaimId": "<129.93.182.12:42491>#1440048812#7#...", "CurrentHosts": 0, "QDate": 1440081300, "Arguments": "-a sandbox.tar.gz --sourceURL=
 https://cmsweb.cern.ch/crabcache --jobNumber=4 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9A89CA60-69FC-E411-9661-0025905C42B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/34F8B66A-D4FB-E411-8F89-842B2B29273C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/7CE6B848-F5FB-E411-A605-0025905A60A8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9E842AA8-54FC-E411-8BC7-000F53273500.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/C44AD465-D4FB-E411-8704-002590200A40.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUE
 TP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/90B6CB1B-07FD-E411-BD52-001E67397CBA.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/183FB65F-69FC-E411-A5A8-0025904B7C26.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/5A0A9A0E-EDFB-E411-B95F-00266CF330B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/3E3768F1-61FC-E411-B163-002618943956.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/08DB9DDE-F4FB-E411-9BC9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/CE293F9B-54FC-E411-83E8-AC853D9DACD3.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_p
 ythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/A4479F5F-69FC-E411-B0B5-0025904C6378.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/0419455F-D4FB-E411-AEFA-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E6BD6C76-54FC-E411-A1F2-AC853D9DACD7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/AC15F863-F5FB-E411-8F07-002590DB9286.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/CC9B7EE2-F4FB-E411-BCD9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/5844575F-D4FB-E411-81F5-003048FFD732.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAO
 DSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/6EC5205E-D4FB-E411-9885-001E67396BB7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/B63200E8-69FC-E411-B949-0025904C51FC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/14554A42-54FC-E411-86D2-0025905A605E.root\"] --runAndLumis=job_lumis_4.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm.unl.edu", "CRAB_Id": 4, "User": "uscms3850@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm
new file mode 100644
index 0000000..8e357b8
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm
@@ -0,0 +1 @@
+{ "count": 10 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm
new file mode 100644
index 0000000..0d74afe
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm
@@ -0,0 +1 @@
+{ "foo": 1, "count": 10 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm
new file mode 100644
index 0000000..e45bb55
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm
@@ -0,0 +1,10 @@
+{ "count": 4 }
+{ "count": 2 }
+{ "count": 4 }
+{ "count": 1 }
+{ "count": 3 }
+{ "count": 3 }
+{ "count": 1 }
+{ "count": 1 }
+{ "count": 2 }
+{ "count": 3 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm
new file mode 100644
index 0000000..e37d32a
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm
@@ -0,0 +1 @@
+1000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm
new file mode 100644
index 0000000..cebc6b5
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm
@@ -0,0 +1 @@
+{ "count": 116 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast
new file mode 100644
index 0000000..420bd44
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast
@@ -0,0 +1,73 @@
+DataverseUse tpch
+Query:
+SELECT [
+FunctionCall tpch.count@1[
+  (
+    SELECT ELEMENT [
+    FieldAccessor [
+      Variable [ Name=#2 ]
+      Field=l
+    ]
+    ]
+    FROM [      Variable [ Name=#1 ]
+      AS
+      Variable [ Name=#2 ]
+    ]
+  )
+]
+count
+]
+FROM [  FunctionCall Metadata.dataset@1[
+    LiteralExpr [STRING] [LineItem]
+  ]
+  AS
+  Variable [ Name=$l ]
+]
+Where
+  OperatorExpr [
+    OperatorExpr [
+      FieldAccessor [
+        Variable [ Name=$l ]
+        Field=l_shipdate
+      ]
+      >=
+      LiteralExpr [STRING] [1994-01-01]
+    ]
+    and
+    OperatorExpr [
+      FieldAccessor [
+        Variable [ Name=$l ]
+        Field=l_shipdate
+      ]
+      <
+      LiteralExpr [STRING] [1995-01-01]
+    ]
+    and
+    OperatorExpr [
+      FieldAccessor [
+        Variable [ Name=$l ]
+        Field=l_discount
+      ]
+      >=
+      LiteralExpr [DOUBLE] [0.05]
+    ]
+    and
+    OperatorExpr [
+      FieldAccessor [
+        Variable [ Name=$l ]
+        Field=l_discount
+      ]
+      <=
+      LiteralExpr [DOUBLE] [0.07]
+    ]
+    and
+    OperatorExpr [
+      FieldAccessor [
+        Variable [ Name=$l ]
+        Field=l_quantity
+      ]
+      <
+      LiteralExpr [LONG] [24]
+    ]
+  ]
+Group All

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java
new file mode 100644
index 0000000..8b014ad
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.api;
+
+public interface IStreamNotificationHandler {
+
+    /**
+     * Used to notify a handler that the stream is about to start reading data from a new source.
+     * An example use is by the parser to skip CSV file headers in case the stream reads from a set of files.
+     */
+    public void notifyNewSource();
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java
new file mode 100644
index 0000000..6eee892
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.asterix.external.api.IExternalIndexer;
+import org.apache.asterix.external.api.IIndexingDatasource;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.indexing.ExternalFile;
+import org.apache.asterix.external.input.record.reader.stream.StreamRecordReader;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class IndexingStreamRecordReader implements IRecordReader<char[]>, IIndexingDatasource {
+
+    private StreamRecordReader reader;
+    private IExternalIndexer indexer;
+
+    public IndexingStreamRecordReader(StreamRecordReader reader, IExternalIndexer indexer) {
+        this.reader = reader;
+        this.indexer = indexer;
+    }
+
+    @Override
+    public void close() throws IOException {
+        reader.close();
+    }
+
+    @Override
+    public IExternalIndexer getIndexer() {
+        return indexer;
+    }
+
+    @Override
+    public boolean hasNext() throws Exception {
+        return reader.hasNext();
+    }
+
+    @Override
+    public IRawRecord<char[]> next() throws IOException, InterruptedException {
+        return reader.next();
+    }
+
+    @Override
+    public boolean stop() {
+        return reader.stop();
+    }
+
+    @Override
+    public void setController(AbstractFeedDataFlowController controller) {
+        reader.setController(controller);
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) throws HyracksDataException {
+        reader.setFeedLogManager(feedLogManager);
+    }
+
+    @Override
+    public List<ExternalFile> getSnapshot() {
+        return null;
+    }
+
+    @Override
+    public int getCurrentSplitIndex() {
+        return -1;
+    }
+
+    @Override
+    public RecordReader<?, ? extends Writable> getReader() {
+        return null;
+    }
+
+    @Override
+    public boolean handleException(Throwable th) {
+        return reader.handleException(th);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java
new file mode 100644
index 0000000..7dc5bce
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.stream;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.api.AsterixInputStream;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.IStreamNotificationHandler;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.input.record.CharArrayRecord;
+import org.apache.asterix.external.input.stream.AsterixInputStreamReader;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.commons.lang.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public abstract class StreamRecordReader implements IRecordReader<char[]>, IStreamNotificationHandler {
+    protected final AsterixInputStreamReader reader;
+    protected CharArrayRecord record;
+    protected char[] inputBuffer;
+    protected int bufferLength = 0;
+    protected int bufferPosn = 0;
+    protected boolean done = false;
+    protected FeedLogManager feedLogManager;
+    protected MutableBoolean newFile = new MutableBoolean(false);
+
+    public StreamRecordReader(AsterixInputStream inputStream) {
+        this.reader = new AsterixInputStreamReader(inputStream);
+        record = new CharArrayRecord();
+        inputBuffer = new char[ExternalDataConstants.DEFAULT_BUFFER_SIZE];
+    }
+
+    @Override
+    public IRawRecord<char[]> next() throws IOException {
+        return record;
+    }
+
+    @Override
+    public void close() throws IOException {
+        if (!done) {
+            reader.close();
+        }
+        done = true;
+    }
+
+    @Override
+    public boolean stop() {
+        try {
+            reader.stop();
+            return true;
+        } catch (Exception e) {
+            e.printStackTrace();
+            return false;
+        }
+    }
+
+    @Override
+    public abstract boolean hasNext() throws IOException;
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) throws HyracksDataException {
+        this.feedLogManager = feedLogManager;
+        reader.setFeedLogManager(feedLogManager);
+    }
+
+    @Override
+    public void setController(AbstractFeedDataFlowController controller) {
+        reader.setController(controller);
+    }
+
+    @Override
+    public boolean handleException(Throwable th) {
+        return reader.handleException(th);
+    }
+
+    @Override
+    public void notifyNewSource() {
+        throw new UnsupportedOperationException();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
new file mode 100644
index 0000000..f743a3f
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.stream;
+
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IInputStreamFactory;
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.api.IRecordReaderFactory;
+import org.apache.asterix.external.provider.StreamRecordReaderProvider;
+import org.apache.asterix.external.provider.StreamRecordReaderProvider.Format;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class StreamRecordReaderFactory implements IRecordReaderFactory<char[]> {
+
+    private static final long serialVersionUID = 1L;
+    protected final IInputStreamFactory streamFactory;
+    protected Map<String, String> configuration;
+    protected Format format;
+
+    public StreamRecordReaderFactory(IInputStreamFactory inputStreamFactory) {
+        this.streamFactory = inputStreamFactory;
+    }
+
+    @Override
+    public DataSourceType getDataSourceType() {
+        return DataSourceType.RECORDS;
+    }
+
+    @Override
+    public Class<?> getRecordClass() {
+        return char[].class;
+    }
+
+    @Override
+    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
+        return streamFactory.getPartitionConstraint();
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) throws AsterixException {
+        this.configuration = configuration;
+        streamFactory.configure(configuration);
+        format = StreamRecordReaderProvider.getReaderFormat(configuration);
+    }
+
+    @Override
+    public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
+            throws HyracksDataException {
+        return StreamRecordReaderProvider.createRecordReader(format, streamFactory.createInputStream(ctx, partition),
+                configuration);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
new file mode 100644
index 0000000..ea8bc98
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.provider;
+
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.AsterixInputStream;
+import org.apache.asterix.external.input.record.reader.stream.EmptyLineSeparatedRecordReader;
+import org.apache.asterix.external.input.record.reader.stream.LineRecordReader;
+import org.apache.asterix.external.input.record.reader.stream.QuotedLineRecordReader;
+import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReader;
+import org.apache.asterix.external.input.record.reader.stream.StreamRecordReader;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class StreamRecordReaderProvider {
+    public enum Format {
+        SEMISTRUCTURED,
+        CSV,
+        LINE_SEPARATED
+    }
+
+    public static Format getReaderFormat(Map<String, String> configuration) throws AsterixException {
+        String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
+        if (format != null) {
+            switch (format) {
+                case ExternalDataConstants.FORMAT_ADM:
+                case ExternalDataConstants.FORMAT_JSON:
+                case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
+                    return Format.SEMISTRUCTURED;
+                case ExternalDataConstants.FORMAT_LINE_SEPARATED:
+                    return Format.LINE_SEPARATED;
+                case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
+                case ExternalDataConstants.FORMAT_CSV:
+                    return Format.CSV;
+            }
+            throw new AsterixException("Unknown format: " + format);
+        }
+        throw new AsterixException("Unspecified paramter: " + ExternalDataConstants.KEY_FORMAT);
+    }
+
+    public static StreamRecordReader createRecordReader(Format format, AsterixInputStream inputStream,
+            Map<String, String> configuration) throws HyracksDataException {
+        switch (format) {
+            case CSV:
+                String quoteString = configuration.get(ExternalDataConstants.KEY_QUOTE);
+                boolean hasHeader = ExternalDataUtils.hasHeader(configuration);
+                if (quoteString != null) {
+                    return new QuotedLineRecordReader(hasHeader, inputStream, quoteString);
+                } else {
+                    return new LineRecordReader(hasHeader, inputStream);
+                }
+            case LINE_SEPARATED:
+                return new EmptyLineSeparatedRecordReader(inputStream);
+            case SEMISTRUCTURED:
+                return new SemiStructuredRecordReader(inputStream,
+                        configuration.get(ExternalDataConstants.KEY_RECORD_START),
+                        configuration.get(ExternalDataConstants.KEY_RECORD_END));
+            default:
+                throw new HyracksDataException("Unknown format: " + format);
+        }
+    }
+}


[37/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
new file mode 100644
index 0000000..cbf05b5
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.util;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.functions.FunctionConstants;
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.lang.common.util.FunctionUtil;
+import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
+import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
+import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
+
+public class FunctionMapUtil {
+
+    private final static String CORE_AGGREGATE_PREFIX = "coll_";
+
+    // Maps from a SQL function name to an AQL function name (i.e., AsterixDB internal name).
+    private static final Map<String, String> FUNCTION_NAME_MAP = new HashMap<>();
+
+    static {
+        FUNCTION_NAME_MAP.put("ceil", "ceiling"); //SQL: ceil,  AQL: ceiling
+        FUNCTION_NAME_MAP.put("length", "string-length"); // SQL: length,  AQL: string-length
+        FUNCTION_NAME_MAP.put("lower", "lowercase"); // SQL: lower, AQL: lowercase
+        FUNCTION_NAME_MAP.put("substr", "substring"); // SQL: substr,  AQL: substring
+        FUNCTION_NAME_MAP.put("upper", "uppercase"); //SQL: upper, AQL: uppercase
+    }
+
+    /**
+     * Whether a function signature is a SQL-92 core aggregate function.
+     *
+     * @param signature,
+     *            the function signature.
+     * @return true if the function signature is a SQL-92 core aggregate,
+     *         false otherwise.
+     */
+    public static boolean isSql92AggregateFunction(FunctionSignature signature) throws AsterixException {
+        IFunctionInfo finfo = FunctionUtil.getFunctionInfo(new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+                signature.getName().toLowerCase(), signature.getArity()));
+        if (finfo == null) {
+            return false;
+        }
+        return AsterixBuiltinFunctions.getAggregateFunction(finfo.getFunctionIdentifier()) != null;
+    }
+
+    /**
+     * Whether a function signature is a SQL++ core aggregate function.
+     *
+     * @param fs,
+     *            the function signature.
+     * @return true if the function signature is a SQL++ core aggregate,
+     *         false otherwise.
+     */
+    public static boolean isCoreAggregateFunction(FunctionSignature fs) {
+        String name = fs.getName().toLowerCase();
+        if (!name.startsWith(CORE_AGGREGATE_PREFIX)) {
+            return false;
+        }
+        IFunctionInfo finfo = FunctionUtil.getFunctionInfo(new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
+                name.substring(CORE_AGGREGATE_PREFIX.length()), fs.getArity()));
+        if (finfo == null) {
+            return false;
+        }
+        return AsterixBuiltinFunctions.getAggregateFunction(finfo.getFunctionIdentifier()) != null;
+    }
+
+    /**
+     * Get the corresponding SQL++ core aggregate function from the SQL-92 aggregate function.
+     *
+     * @param fs,
+     *            the SQL-92 aggregate function signature.
+     * @return the SQL++ aggregate function signature.
+     * @throws AsterixException
+     */
+    public static FunctionSignature sql92ToCoreAggregateFunction(FunctionSignature fs) throws AsterixException {
+        if (!isSql92AggregateFunction(fs)) {
+            return fs;
+        }
+        return new FunctionSignature(fs.getNamespace(), CORE_AGGREGATE_PREFIX + fs.getName(), fs.getArity());
+    }
+
+    /**
+     * Maps a user invoked function signature to a system internal function signature.
+     *
+     * @param fs,
+     *            the user typed function.
+     * @return the system internal function.
+     */
+    public static FunctionSignature normalizeBuiltinFunctionSignature(FunctionSignature fs, boolean checkSql92Aggregate)
+            throws AsterixException {
+        String mappedName = internalizeBuiltinScalarFunctionName(fs.getName());
+        if (isCoreAggregateFunction(fs)) {
+            mappedName = internalizeCoreAggregateFunctionName(mappedName);
+        } else if (checkSql92Aggregate && isSql92AggregateFunction(fs)) {
+            throw new AsterixException(fs.getName()
+                    + " is a SQL-92 aggregate function. The SQL++ core aggregate function " + CORE_AGGREGATE_PREFIX
+                    + fs.getName().toLowerCase() + " could potentially express the intent.");
+        }
+        return new FunctionSignature(fs.getNamespace(), mappedName, fs.getArity());
+    }
+
+    /**
+     * Removes the "coll_" prefix for user-facing SQL++ core aggregate function names.
+     *
+     * @param name,
+     *            the name of a user-facing SQL++ core aggregate function name.
+     * @return the AsterixDB internal function name for the aggregate function.
+     * @throws AsterixException
+     */
+    private static String internalizeCoreAggregateFunctionName(String name) throws AsterixException {
+        String lowerCaseName = name.toLowerCase();
+        return lowerCaseName.substring(CORE_AGGREGATE_PREFIX.length());
+    }
+
+    /**
+     * Note: function name normalization can ONLY be called
+     * after all user-defined functions (by either "DECLARE FUNCTION" or "CREATE FUNCTION")
+     * are inlined, because user-defined function names are case-sensitive.
+     *
+     * @param name
+     *            the user-input function name in the query.
+     * @return the mapped internal name.
+     */
+    private static String internalizeBuiltinScalarFunctionName(String name) {
+        String lowerCaseName = name.toLowerCase();
+        String mappedName = FUNCTION_NAME_MAP.get(lowerCaseName);
+        if (mappedName != null) {
+            return mappedName;
+        }
+        return lowerCaseName;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java
new file mode 100644
index 0000000..1bca7ac
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java
@@ -0,0 +1,265 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.visitor;
+
+import java.util.List;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.base.ILangExpression;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.clause.LimitClause;
+import org.apache.asterix.lang.common.clause.OrderbyClause;
+import org.apache.asterix.lang.common.clause.WhereClause;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.common.expression.FieldAccessor;
+import org.apache.asterix.lang.common.expression.FieldBinding;
+import org.apache.asterix.lang.common.expression.IfExpr;
+import org.apache.asterix.lang.common.expression.IndexAccessor;
+import org.apache.asterix.lang.common.expression.ListConstructor;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.OperatorExpr;
+import org.apache.asterix.lang.common.expression.QuantifiedExpression;
+import org.apache.asterix.lang.common.expression.RecordConstructor;
+import org.apache.asterix.lang.common.expression.UnaryExpr;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.statement.FunctionDecl;
+import org.apache.asterix.lang.common.statement.Query;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.HavingClause;
+import org.apache.asterix.lang.sqlpp.clause.JoinClause;
+import org.apache.asterix.lang.sqlpp.clause.NestClause;
+import org.apache.asterix.lang.sqlpp.clause.Projection;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectElement;
+import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
+
+/**
+ * This visitor checks if a language construct contains SQL-92 aggregates.
+ */
+public class CheckSql92AggregateVisitor extends AbstractSqlppQueryExpressionVisitor<Boolean, ILangExpression> {
+
+    // --- Constructs that terminate the search (return false). ---
+    // Query bodies, function declarations and SELECT sub-expressions form their own
+    // aggregation scopes; NOTE(review): presumably each is checked independently by a
+    // fresh traversal, so this visitor deliberately does not descend into them -- confirm.
+
+    @Override
+    public Boolean visit(Query q, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(FunctionDecl fd, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    // A literal can never be, nor contain, an aggregate call.
+    @Override
+    public Boolean visit(LiteralExpr l, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    // A bare variable reference can never be, nor contain, an aggregate call.
+    @Override
+    public Boolean visit(VariableExpr v, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    // --- Composite expressions: recurse into children and report if any contains one. ---
+
+    @Override
+    public Boolean visit(ListConstructor lc, ILangExpression parentSelectBlock) throws AsterixException {
+        return visitExprList(lc.getExprList(), parentSelectBlock);
+    }
+
+    @Override
+    public Boolean visit(RecordConstructor rc, ILangExpression parentSelectBlock) throws AsterixException {
+        // Both field-name and field-value expressions may contain aggregate calls.
+        for (FieldBinding fieldBinding : rc.getFbList()) {
+            ILangExpression leftExpr = fieldBinding.getLeftExpr();
+            ILangExpression rightExpr = fieldBinding.getRightExpr();
+            if (leftExpr.accept(this, parentSelectBlock)) {
+                return true;
+            }
+            if (rightExpr.accept(this, parentSelectBlock)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public Boolean visit(OperatorExpr ifbo, ILangExpression parentSelectBlock) throws AsterixException {
+        return visitExprList(ifbo.getExprList(), parentSelectBlock);
+    }
+
+    @Override
+    public Boolean visit(FieldAccessor fa, ILangExpression parentSelectBlock) throws AsterixException {
+        return fa.getExpr().accept(this, parentSelectBlock);
+    }
+
+    @Override
+    public Boolean visit(IndexAccessor ia, ILangExpression parentSelectBlock) throws AsterixException {
+        // Only the accessed expression is checked; NOTE(review): the index expression
+        // (if any) is not visited here -- confirm that is intentional.
+        return ia.getExpr().accept(this, parentSelectBlock);
+    }
+
+    @Override
+    public Boolean visit(IfExpr ifexpr, ILangExpression parentSelectBlock) throws AsterixException {
+        // True if the condition, the then-branch, or the else-branch contains an aggregate.
+        if (ifexpr.getCondExpr().accept(this, parentSelectBlock)) {
+            return true;
+        } else {
+            return ifexpr.getThenExpr().accept(this, parentSelectBlock)
+                    || ifexpr.getElseExpr().accept(this, parentSelectBlock);
+        }
+    }
+
+    // NOTE(review): quantified expressions are not traversed; presumably they introduce
+    // their own binding scope in which SQL-92 aggregates are resolved separately -- confirm.
+    @Override
+    public Boolean visit(QuantifiedExpression qe, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(UnaryExpr u, ILangExpression parentSelectBlock) throws AsterixException {
+        return u.getExpr().accept(this, parentSelectBlock);
+    }
+
+    /**
+     * The core check: a call expression contains a SQL-92 aggregate if its own signature
+     * is a SQL-92 aggregate function, or if any of its argument expressions contains one.
+     */
+    @Override
+    public Boolean visit(CallExpr pf, ILangExpression parentSelectBlock) throws AsterixException {
+        FunctionSignature fs = pf.getFunctionSignature();
+        if (FunctionMapUtil.isSql92AggregateFunction(fs)) {
+            return true;
+        }
+        for (Expression parameter : pf.getExprList()) {
+            if (parameter.accept(this, parentSelectBlock)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    // --- Clauses: not traversed. NOTE(review): this visitor is only driven through a
+    // SelectBlock's SELECT clause (see visit(SelectBlock)); aggregates appearing in other
+    // clauses are presumably checked by separate traversals -- confirm.
+
+    @Override
+    public Boolean visit(LetClause lc, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(WhereClause wc, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(OrderbyClause oc, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(GroupbyClause gc, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(LimitClause lc, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(FromClause fromClause, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(FromTerm fromTerm, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(JoinClause joinClause, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(NestClause nestClause, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(Projection projection, ILangExpression parentSelectBlock) throws AsterixException {
+        return projection.getExpression().accept(this, parentSelectBlock);
+    }
+
+    // Entry point for a select block: only its SELECT clause is examined, and the block
+    // itself is passed down as the parent context.
+    @Override
+    public Boolean visit(SelectBlock selectBlock, ILangExpression parentSelectBlock) throws AsterixException {
+        return selectBlock.getSelectClause().accept(this, selectBlock);
+    }
+
+    @Override
+    public Boolean visit(SelectClause selectClause, ILangExpression parentSelectBlock) throws AsterixException {
+        // A SELECT clause holds either a single SELECT-element (SELECT VALUE) or a
+        // regular projection list; delegate to whichever form is present.
+        if (selectClause.selectElement()) {
+            return selectClause.getSelectElement().accept(this, parentSelectBlock);
+        } else {
+            return selectClause.getSelectRegular().accept(this, parentSelectBlock);
+        }
+    }
+
+    @Override
+    public Boolean visit(SelectElement selectElement, ILangExpression parentSelectBlock) throws AsterixException {
+        return selectElement.getExpression().accept(this, parentSelectBlock);
+    }
+
+    @Override
+    public Boolean visit(SelectRegular selectRegular, ILangExpression parentSelectBlock) throws AsterixException {
+        // True if any projection in the projection list contains a SQL-92 aggregate.
+        for (Projection projection : selectRegular.getProjections()) {
+            if (projection.accept(this, parentSelectBlock)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    // Set operations and SELECT sub-expressions are scope boundaries; aggregates inside
+    // them belong to their own select blocks, not to parentSelectBlock.
+    @Override
+    public Boolean visit(SelectSetOperation selectSetOperation, ILangExpression parentSelectBlock)
+            throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(SelectExpression selectStatement, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(UnnestClause unnestClause, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    @Override
+    public Boolean visit(HavingClause havingClause, ILangExpression parentSelectBlock) throws AsterixException {
+        return false;
+    }
+
+    // Helper: true if any expression in the list contains a SQL-92 aggregate.
+    private Boolean visitExprList(List<Expression> exprs, ILangExpression parentSelectBlock) throws AsterixException {
+        for (Expression item : exprs) {
+            if (item.accept(this, parentSelectBlock)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java
new file mode 100644
index 0000000..2d891e0
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java
@@ -0,0 +1,415 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.visitor;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.base.ILangExpression;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.clause.LimitClause;
+import org.apache.asterix.lang.common.clause.OrderbyClause;
+import org.apache.asterix.lang.common.clause.WhereClause;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.common.expression.FieldAccessor;
+import org.apache.asterix.lang.common.expression.FieldBinding;
+import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
+import org.apache.asterix.lang.common.expression.IfExpr;
+import org.apache.asterix.lang.common.expression.IndexAccessor;
+import org.apache.asterix.lang.common.expression.ListConstructor;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.OperatorExpr;
+import org.apache.asterix.lang.common.expression.QuantifiedExpression;
+import org.apache.asterix.lang.common.expression.RecordConstructor;
+import org.apache.asterix.lang.common.expression.UnaryExpr;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.statement.FunctionDecl;
+import org.apache.asterix.lang.common.statement.Query;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.lang.common.struct.QuantifiedPair;
+import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.HavingClause;
+import org.apache.asterix.lang.sqlpp.clause.JoinClause;
+import org.apache.asterix.lang.sqlpp.clause.NestClause;
+import org.apache.asterix.lang.sqlpp.clause.Projection;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectElement;
+import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Produces a deep copy of a SQL++ AST subtree. Every visit method returns a newly
+ * constructed node whose children are themselves deep copies; leaf values that are
+ * treated as immutable (literals, identifiers, signatures, modifier lists) are shared.
+ */
+public class DeepCopyVisitor extends AbstractSqlppQueryExpressionVisitor<ILangExpression, Void> {
+
+    @Override
+    public FromClause visit(FromClause fromClause, Void arg) throws AsterixException {
+        List<FromTerm> fromTerms = new ArrayList<>();
+        for (FromTerm fromTerm : fromClause.getFromTerms()) {
+            fromTerms.add((FromTerm) fromTerm.accept(this, arg));
+        }
+        return new FromClause(fromTerms);
+    }
+
+    @Override
+    public FromTerm visit(FromTerm fromTerm, Void arg) throws AsterixException {
+        // Visit the left expression of a from term.
+        Expression fromExpr = (Expression) fromTerm.getLeftExpression().accept(this, arg);
+        VariableExpr fromVar = (VariableExpr) fromTerm.getLeftVariable().accept(this, arg);
+        VariableExpr positionVar = fromTerm.getPositionalVariable() == null ? null
+                : (VariableExpr) fromTerm.getPositionalVariable().accept(this, arg);
+
+        // Visits join/unnest/nest clauses.
+        List<AbstractBinaryCorrelateClause> correlateClauses = new ArrayList<>();
+        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
+            correlateClauses.add((AbstractBinaryCorrelateClause) correlateClause.accept(this, arg));
+        }
+        return new FromTerm(fromExpr, fromVar, positionVar, correlateClauses);
+    }
+
+    @Override
+    public JoinClause visit(JoinClause joinClause, Void arg) throws AsterixException {
+        Expression rightExpression = (Expression) joinClause.getRightExpression().accept(this, arg);
+        VariableExpr rightVar = (VariableExpr) joinClause.getRightVariable().accept(this, arg);
+        VariableExpr rightPositionVar = joinClause.getPositionalVariable() == null ? null
+                : (VariableExpr) joinClause.getPositionalVariable().accept(this, arg);
+        Expression conditionExpression = (Expression) joinClause.getConditionExpression().accept(this, arg);
+        return new JoinClause(joinClause.getJoinType(), rightExpression, rightVar, rightPositionVar,
+                conditionExpression);
+    }
+
+    @Override
+    public NestClause visit(NestClause nestClause, Void arg) throws AsterixException {
+        Expression rightExpression = (Expression) nestClause.getRightExpression().accept(this, arg);
+        VariableExpr rightVar = (VariableExpr) nestClause.getRightVariable().accept(this, arg);
+        VariableExpr rightPositionVar = nestClause.getPositionalVariable() == null ? null
+                : (VariableExpr) nestClause.getPositionalVariable().accept(this, arg);
+        Expression conditionExpression = (Expression) nestClause.getConditionExpression().accept(this, arg);
+        return new NestClause(nestClause.getJoinType(), rightExpression, rightVar, rightPositionVar,
+                conditionExpression);
+    }
+
+    @Override
+    public UnnestClause visit(UnnestClause unnestClause, Void arg) throws AsterixException {
+        Expression rightExpression = (Expression) unnestClause.getRightExpression().accept(this, arg);
+        VariableExpr rightVar = (VariableExpr) unnestClause.getRightVariable().accept(this, arg);
+        VariableExpr rightPositionVar = unnestClause.getPositionalVariable() == null ? null
+                : (VariableExpr) unnestClause.getPositionalVariable().accept(this, arg);
+        return new UnnestClause(unnestClause.getJoinType(), rightExpression, rightVar, rightPositionVar);
+    }
+
+    @Override
+    public Projection visit(Projection projection, Void arg) throws AsterixException {
+        return new Projection((Expression) projection.getExpression().accept(this, arg), projection.getName(),
+                projection.star(), projection.exprStar());
+    }
+
+    @Override
+    public SelectBlock visit(SelectBlock selectBlock, Void arg) throws AsterixException {
+        FromClause fromClause = null;
+        List<LetClause> letClauses = new ArrayList<>();
+        WhereClause whereClause = null;
+        GroupbyClause gbyClause = null;
+        List<LetClause> gbyLetClauses = new ArrayList<>();
+        HavingClause havingClause = null;
+        SelectClause selectClause = null;
+        // Traverses the select block in the order of "from", "let"s, "where",
+        // "group by", "let"s, "having" and "select".
+        if (selectBlock.hasFromClause()) {
+            fromClause = (FromClause) selectBlock.getFromClause().accept(this, arg);
+        }
+        if (selectBlock.hasLetClauses()) {
+            List<LetClause> letList = selectBlock.getLetList();
+            for (LetClause letClause : letList) {
+                letClauses.add((LetClause) letClause.accept(this, arg));
+            }
+        }
+        if (selectBlock.hasWhereClause()) {
+            whereClause = (WhereClause) selectBlock.getWhereClause().accept(this, arg);
+        }
+        if (selectBlock.hasGroupbyClause()) {
+            gbyClause = (GroupbyClause) selectBlock.getGroupbyClause().accept(this, arg);
+        }
+        if (selectBlock.hasLetClausesAfterGroupby()) {
+            List<LetClause> letListAfterGby = selectBlock.getLetListAfterGroupby();
+            for (LetClause letClauseAfterGby : letListAfterGby) {
+                gbyLetClauses.add((LetClause) letClauseAfterGby.accept(this, arg));
+            }
+        }
+        if (selectBlock.hasHavingClause()) {
+            havingClause = (HavingClause) selectBlock.getHavingClause().accept(this, arg);
+        }
+        selectClause = (SelectClause) selectBlock.getSelectClause().accept(this, arg);
+        return new SelectBlock(selectClause, fromClause, letClauses, whereClause, gbyClause, gbyLetClauses,
+                havingClause);
+    }
+
+    @Override
+    public SelectClause visit(SelectClause selectClause, Void arg) throws AsterixException {
+        SelectElement selectElement = null;
+        SelectRegular selectRegular = null;
+        if (selectClause.selectElement()) {
+            selectElement = (SelectElement) selectClause.getSelectElement().accept(this, arg);
+        }
+        if (selectClause.selectRegular()) {
+            selectRegular = (SelectRegular) selectClause.getSelectRegular().accept(this, arg);
+        }
+        return new SelectClause(selectElement, selectRegular, selectClause.distinct());
+    }
+
+    @Override
+    public SelectElement visit(SelectElement selectElement, Void arg) throws AsterixException {
+        return new SelectElement((Expression) selectElement.getExpression().accept(this, arg));
+    }
+
+    @Override
+    public SelectRegular visit(SelectRegular selectRegular, Void arg) throws AsterixException {
+        List<Projection> projections = new ArrayList<>();
+        for (Projection projection : selectRegular.getProjections()) {
+            projections.add((Projection) projection.accept(this, arg));
+        }
+        return new SelectRegular(projections);
+    }
+
+    @Override
+    public SelectSetOperation visit(SelectSetOperation selectSetOperation, Void arg) throws AsterixException {
+        SetOperationInput leftInput = selectSetOperation.getLeftInput();
+        SetOperationInput newLeftInput = null;
+        if (leftInput.selectBlock()) {
+            newLeftInput = new SetOperationInput((SelectBlock) leftInput.accept(this, arg), null);
+        } else {
+            newLeftInput = new SetOperationInput(null, (SelectExpression) leftInput.accept(this, arg));
+        }
+        List<SetOperationRight> rightInputs = new ArrayList<>();
+        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
+            SetOperationInput newRightInput = null;
+            SetOperationInput setOpRightInput = right.getSetOperationRightInput();
+            // Bug fix: copy the right input itself, not (a second copy of) the left input.
+            if (setOpRightInput.selectBlock()) {
+                newRightInput = new SetOperationInput((SelectBlock) setOpRightInput.accept(this, arg), null);
+            } else {
+                newRightInput = new SetOperationInput(null, (SelectExpression) setOpRightInput.accept(this, arg));
+            }
+            rightInputs.add(new SetOperationRight(right.getSetOpType(), right.isSetSemantics(), newRightInput));
+        }
+        return new SelectSetOperation(newLeftInput, rightInputs);
+    }
+
+    @Override
+    public HavingClause visit(HavingClause havingClause, Void arg) throws AsterixException {
+        return new HavingClause((Expression) havingClause.getFilterExpression().accept(this, arg));
+    }
+
+    @Override
+    public Query visit(Query q, Void arg) throws AsterixException {
+        return new Query(q.isTopLevel(), (Expression) q.getBody().accept(this, arg), q.getVarCounter(),
+                q.getDataverses(), q.getDatasets());
+    }
+
+    @Override
+    public FunctionDecl visit(FunctionDecl fd, Void arg) throws AsterixException {
+        return new FunctionDecl(fd.getSignature(), fd.getParamList(), (Expression) fd.getFuncBody().accept(this, arg));
+    }
+
+    @Override
+    public WhereClause visit(WhereClause whereClause, Void arg) throws AsterixException {
+        return new WhereClause((Expression) whereClause.getWhereExpr().accept(this, arg));
+    }
+
+    @Override
+    public OrderbyClause visit(OrderbyClause oc, Void arg) throws AsterixException {
+        List<Expression> newOrderbyList = new ArrayList<>();
+        for (Expression orderExpr : oc.getOrderbyList()) {
+            newOrderbyList.add((Expression) orderExpr.accept(this, arg));
+        }
+        // The modifier (ASC/DESC) list is shared; it is not mutated by consumers.
+        return new OrderbyClause(newOrderbyList, oc.getModifierList());
+    }
+
+    @Override
+    public GroupbyClause visit(GroupbyClause gc, Void arg) throws AsterixException {
+        List<GbyVariableExpressionPair> gbyPairList = new ArrayList<>();
+        List<GbyVariableExpressionPair> decorPairList = new ArrayList<>();
+        List<VariableExpr> withVarList = new ArrayList<>();
+        VariableExpr groupVarExpr = null;
+        List<Pair<Expression, Identifier>> groupFieldList = new ArrayList<>();
+        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
+            gbyPairList.add(new GbyVariableExpressionPair((VariableExpr) gbyVarExpr.getVar().accept(this, arg),
+                    (Expression) gbyVarExpr.getExpr().accept(this, arg)));
+        }
+        for (GbyVariableExpressionPair gbyVarExpr : gc.getDecorPairList()) {
+            decorPairList.add(new GbyVariableExpressionPair((VariableExpr) gbyVarExpr.getVar().accept(this, arg),
+                    (Expression) gbyVarExpr.getExpr().accept(this, arg)));
+        }
+        for (VariableExpr withVar : gc.getWithVarList()) {
+            withVarList.add((VariableExpr) withVar.accept(this, arg));
+        }
+        if (gc.hasGroupVar()) {
+            groupVarExpr = (VariableExpr) gc.getGroupVar().accept(this, arg);
+        }
+        for (Pair<Expression, Identifier> field : gc.getGroupFieldList()) {
+            groupFieldList.add(new Pair<>((Expression) field.first.accept(this, arg), field.second));
+        }
+        return new GroupbyClause(gbyPairList, decorPairList, withVarList, groupVarExpr, groupFieldList,
+                gc.hasHashGroupByHint(), gc.isGroupAll());
+    }
+
+    @Override
+    public LimitClause visit(LimitClause limitClause, Void arg) throws AsterixException {
+        Expression limitExpr = (Expression) limitClause.getLimitExpr().accept(this, arg);
+        Expression offsetExpr = limitClause.hasOffset() ? (Expression) limitClause.getOffset().accept(this, arg) : null;
+        return new LimitClause(limitExpr, offsetExpr);
+    }
+
+    @Override
+    public LetClause visit(LetClause letClause, Void arg) throws AsterixException {
+        return new LetClause((VariableExpr) letClause.getVarExpr().accept(this, arg),
+                (Expression) letClause.getBindingExpr().accept(this, arg));
+    }
+
+    @Override
+    public SelectExpression visit(SelectExpression selectExpression, Void arg) throws AsterixException {
+        List<LetClause> lets = new ArrayList<>();
+        SelectSetOperation select = null;
+        OrderbyClause orderby = null;
+        LimitClause limit = null;
+
+        // visit let list
+        if (selectExpression.hasLetClauses()) {
+            for (LetClause letClause : selectExpression.getLetList()) {
+                lets.add((LetClause) letClause.accept(this, arg));
+            }
+        }
+
+        // visit the main select.
+        select = (SelectSetOperation) selectExpression.getSelectSetOperation().accept(this, arg);
+
+        // visit order by
+        if (selectExpression.hasOrderby()) {
+            List<Expression> orderExprs = new ArrayList<>();
+            for (Expression orderExpr : selectExpression.getOrderbyClause().getOrderbyList()) {
+                orderExprs.add((Expression) orderExpr.accept(this, arg));
+            }
+            orderby = new OrderbyClause(orderExprs, selectExpression.getOrderbyClause().getModifierList());
+        }
+
+        // visit limit
+        if (selectExpression.hasLimit()) {
+            limit = (LimitClause) selectExpression.getLimitClause().accept(this, arg);
+        }
+        return new SelectExpression(lets, select, orderby, limit, selectExpression.isSubquery());
+    }
+
+    @Override
+    public LiteralExpr visit(LiteralExpr l, Void arg) throws AsterixException {
+        // Literals carry no mutable children; sharing the node is safe.
+        return l;
+    }
+
+    @Override
+    public ListConstructor visit(ListConstructor lc, Void arg) throws AsterixException {
+        List<Expression> newExprList = new ArrayList<>();
+        for (Expression expr : lc.getExprList()) {
+            newExprList.add((Expression) expr.accept(this, arg));
+        }
+        return new ListConstructor(lc.getType(), newExprList);
+    }
+
+    @Override
+    public RecordConstructor visit(RecordConstructor rc, Void arg) throws AsterixException {
+        List<FieldBinding> bindings = new ArrayList<>();
+        for (FieldBinding binding : rc.getFbList()) {
+            FieldBinding fb = new FieldBinding((Expression) binding.getLeftExpr().accept(this, arg),
+                    (Expression) binding.getRightExpr().accept(this, arg));
+            bindings.add(fb);
+        }
+        return new RecordConstructor(bindings);
+    }
+
+    @Override
+    public OperatorExpr visit(OperatorExpr operatorExpr, Void arg) throws AsterixException {
+        List<Expression> newExprList = new ArrayList<>();
+        for (Expression expr : operatorExpr.getExprList()) {
+            newExprList.add((Expression) expr.accept(this, arg));
+        }
+        return new OperatorExpr(newExprList, operatorExpr.getExprBroadcastIdx(), operatorExpr.getOpList(),
+                operatorExpr.isCurrentop());
+    }
+
+    @Override
+    public IfExpr visit(IfExpr ifExpr, Void arg) throws AsterixException {
+        Expression conditionExpr = (Expression) ifExpr.getCondExpr().accept(this, arg);
+        Expression thenExpr = (Expression) ifExpr.getThenExpr().accept(this, arg);
+        Expression elseExpr = (Expression) ifExpr.getElseExpr().accept(this, arg);
+        return new IfExpr(conditionExpr, thenExpr, elseExpr);
+    }
+
+    @Override
+    public QuantifiedExpression visit(QuantifiedExpression qe, Void arg) throws AsterixException {
+        List<QuantifiedPair> quantifiedPairs = new ArrayList<>();
+        for (QuantifiedPair pair : qe.getQuantifiedList()) {
+            Expression expr = (Expression) pair.getExpr().accept(this, arg);
+            VariableExpr var = (VariableExpr) pair.getVarExpr().accept(this, arg);
+            quantifiedPairs.add(new QuantifiedPair(var, expr));
+        }
+        Expression condition = (Expression) qe.getSatisfiesExpr().accept(this, arg);
+        return new QuantifiedExpression(qe.getQuantifier(), quantifiedPairs, condition);
+    }
+
+    @Override
+    public CallExpr visit(CallExpr callExpr, Void arg) throws AsterixException {
+        List<Expression> newExprList = new ArrayList<>();
+        for (Expression expr : callExpr.getExprList()) {
+            newExprList.add((Expression) expr.accept(this, arg));
+        }
+        return new CallExpr(callExpr.getFunctionSignature(), newExprList);
+    }
+
+    @Override
+    public VariableExpr visit(VariableExpr varExpr, Void arg) throws AsterixException {
+        return new VariableExpr(varExpr.getVar());
+    }
+
+    @Override
+    public UnaryExpr visit(UnaryExpr u, Void arg) throws AsterixException {
+        return new UnaryExpr(u.getSign(), (Expression) u.getExpr().accept(this, arg));
+    }
+
+    @Override
+    public FieldAccessor visit(FieldAccessor fa, Void arg) throws AsterixException {
+        return new FieldAccessor((Expression) fa.getExpr().accept(this, arg), fa.getIdent());
+    }
+
+    @Override
+    public Expression visit(IndexAccessor ia, Void arg) throws AsterixException {
+        Expression expr = (Expression) ia.getExpr().accept(this, arg);
+        Expression indexExpr = null;
+        if (ia.getIndexExpr() != null) {
+            // Bug fix: deep-copy the index expression instead of aliasing it,
+            // otherwise the copy shares mutable state with the original tree.
+            indexExpr = (Expression) ia.getIndexExpr().accept(this, arg);
+        }
+        return new IndexAccessor(expr, indexExpr);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java
new file mode 100644
index 0000000..6e70455
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java
@@ -0,0 +1,471 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.visitor;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Clause.ClauseType;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.clause.LimitClause;
+import org.apache.asterix.lang.common.clause.OrderbyClause;
+import org.apache.asterix.lang.common.clause.WhereClause;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.common.expression.FieldAccessor;
+import org.apache.asterix.lang.common.expression.FieldBinding;
+import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
+import org.apache.asterix.lang.common.expression.IfExpr;
+import org.apache.asterix.lang.common.expression.IndexAccessor;
+import org.apache.asterix.lang.common.expression.ListConstructor;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.OperatorExpr;
+import org.apache.asterix.lang.common.expression.QuantifiedExpression;
+import org.apache.asterix.lang.common.expression.RecordConstructor;
+import org.apache.asterix.lang.common.expression.UnaryExpr;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.statement.FunctionDecl;
+import org.apache.asterix.lang.common.statement.Query;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.lang.common.struct.QuantifiedPair;
+import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.HavingClause;
+import org.apache.asterix.lang.sqlpp.clause.JoinClause;
+import org.apache.asterix.lang.sqlpp.clause.NestClause;
+import org.apache.asterix.lang.sqlpp.clause.Projection;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectElement;
+import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+
+public class FreeVariableVisitor extends AbstractSqlppQueryExpressionVisitor<Void, Collection<VariableExpr>> {
+
+    @Override
+    public Void visit(FromClause fromClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        Collection<VariableExpr> bindingVars = new HashSet<>();
+        for (FromTerm fromTerm : fromClause.getFromTerms()) {
+            Collection<VariableExpr> fromTermFreeVars = new HashSet<>();
+            fromTerm.accept(this, fromTermFreeVars);
+
+            // Since a right from term can refer to variables defined in a left from term,
+            // we remove binding variables from the free variables.
+            fromTermFreeVars.removeAll(bindingVars);
+
+            // Adds binding variables.
+            bindingVars.addAll(SqlppVariableUtil.getBindingVariables(fromTerm));
+
+            // Adds into freeVars.
+            freeVars.addAll(fromTermFreeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(FromTerm fromTerm, Collection<VariableExpr> freeVars) throws AsterixException {
+        // The encountered binding variables so far in the fromterm.
+        Collection<VariableExpr> bindingVariables = new HashSet<>();
+
+        // Visit the left expression of a from term.
+        fromTerm.getLeftExpression().accept(this, freeVars);
+
+        // Adds binding variables.
+        bindingVariables.add(fromTerm.getLeftVariable());
+        if (fromTerm.hasPositionalVariable()) {
+            bindingVariables.add(fromTerm.getPositionalVariable());
+        }
+
+        // Visits join/unnest/nest clauses.
+        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
+            Collection<VariableExpr> correlateFreeVars = new HashSet<>();
+            correlateClause.accept(this, correlateFreeVars);
+            if (correlateClause.getClauseType() != ClauseType.JOIN_CLAUSE) {
+                // Correlation is allowed if the clause is not a join clause,
+                // therefore we remove left-side binding variables for these cases.
+                correlateFreeVars.removeAll(bindingVariables);
+
+                // Adds binding variables.
+                bindingVariables.add(correlateClause.getRightVariable());
+                if (correlateClause.hasPositionalVariable()) {
+                    bindingVariables.add(correlateClause.getPositionalVariable());
+                }
+            }
+            freeVars.addAll(correlateFreeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(JoinClause joinClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        visitJoinAndNest(joinClause, joinClause.getConditionExpression(), freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(NestClause nestClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        visitJoinAndNest(nestClause, nestClause.getConditionExpression(), freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(UnnestClause unnestClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        unnestClause.getRightExpression().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(Projection projection, Collection<VariableExpr> freeVars) throws AsterixException {
+        projection.getExpression().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectBlock selectBlock, Collection<VariableExpr> freeVars) throws AsterixException {
+        Collection<VariableExpr> selectFreeVars = new HashSet<>();
+        Collection<VariableExpr> fromFreeVars = new HashSet<>();
+        Collection<VariableExpr> letsFreeVars = new HashSet<>();
+        Collection<VariableExpr> whereFreeVars = new HashSet<>();
+        Collection<VariableExpr> gbyFreeVars = new HashSet<>();
+        Collection<VariableExpr> gbyLetsFreeVars = new HashSet<>();
+
+        Collection<VariableExpr> fromBindingVars = SqlppVariableUtil.getBindingVariables(selectBlock.getFromClause());
+        Collection<VariableExpr> letsBindingVars = SqlppVariableUtil.getBindingVariables(selectBlock.getLetList());
+        Collection<VariableExpr> gbyBindingVars = SqlppVariableUtil.getBindingVariables(selectBlock.getGroupbyClause());
+        Collection<VariableExpr> gbyLetsBindingVars = SqlppVariableUtil
+                .getBindingVariables(selectBlock.getLetListAfterGroupby());
+
+        selectBlock.getSelectClause().accept(this, selectFreeVars);
+        // Removes group-by, from, let, and gby-let binding vars.
+        removeAllBindingVarsInSelectBlock(selectFreeVars, fromBindingVars, letsBindingVars, gbyLetsBindingVars);
+
+        if (selectBlock.hasFromClause()) {
+            selectBlock.getFromClause().accept(this, fromFreeVars);
+        }
+        if (selectBlock.hasLetClauses()) {
+            visitLetClauses(selectBlock.getLetList(), letsFreeVars);
+            letsFreeVars.removeAll(fromBindingVars);
+        }
+        if (selectBlock.hasWhereClause()) {
+            selectBlock.getWhereClause().accept(this, whereFreeVars);
+            whereFreeVars.removeAll(fromBindingVars);
+            whereFreeVars.removeAll(letsBindingVars);
+        }
+        if (selectBlock.hasGroupbyClause()) {
+            selectBlock.getGroupbyClause().accept(this, gbyFreeVars);
+            // Remove group-by and let binding vars.
+            gbyFreeVars.removeAll(fromBindingVars);
+            gbyFreeVars.removeAll(letsBindingVars);
+            if (selectBlock.hasLetClausesAfterGroupby()) {
+                visitLetClauses(selectBlock.getLetListAfterGroupby(), gbyLetsFreeVars);
+                gbyLetsFreeVars.removeAll(fromBindingVars);
+                gbyLetsFreeVars.removeAll(letsBindingVars);
+                gbyLetsFreeVars.removeAll(gbyBindingVars);
+            }
+            if (selectBlock.hasHavingClause()) {
+                selectBlock.getHavingClause().accept(this, selectFreeVars);
+                removeAllBindingVarsInSelectBlock(selectFreeVars, fromBindingVars, letsBindingVars, gbyLetsBindingVars);
+            }
+        }
+
+        // Removes all binding vars from <code>freeVars</code>, which contains the free
+        // vars in the order-by and limit.
+        removeAllBindingVarsInSelectBlock(freeVars, fromBindingVars, letsBindingVars, gbyLetsBindingVars);
+
+        // Adds all free vars.
+        freeVars.addAll(selectFreeVars);
+        freeVars.addAll(fromFreeVars);
+        freeVars.addAll(letsFreeVars);
+        freeVars.addAll(whereFreeVars);
+        freeVars.addAll(gbyFreeVars);
+        freeVars.addAll(gbyLetsFreeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectClause selectClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        if (selectClause.selectElement()) {
+            selectClause.getSelectElement().accept(this, freeVars);
+        }
+        if (selectClause.selectRegular()) {
+            selectClause.getSelectRegular().accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectElement selectElement, Collection<VariableExpr> freeVars) throws AsterixException {
+        selectElement.getExpression().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectRegular selectRegular, Collection<VariableExpr> freeVars) throws AsterixException {
+        for (Projection projection : selectRegular.getProjections()) {
+            projection.accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectSetOperation selectSetOperation, Collection<VariableExpr> freeVars)
+            throws AsterixException {
+        selectSetOperation.getLeftInput().accept(this, freeVars);
+        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
+            right.getSetOperationRightInput().accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(HavingClause havingClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        havingClause.getFilterExpression().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(Query q, Collection<VariableExpr> freeVars) throws AsterixException {
+        q.getBody().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(FunctionDecl fd, Collection<VariableExpr> freeVars) throws AsterixException {
+        fd.getFuncBody().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(WhereClause whereClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        whereClause.getWhereExpr().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(OrderbyClause oc, Collection<VariableExpr> freeVars) throws AsterixException {
+        for (Expression orderExpr : oc.getOrderbyList()) {
+            orderExpr.accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(GroupbyClause gc, Collection<VariableExpr> freeVars) throws AsterixException {
+        // Puts all group-by variables into the symbol set of the new scope.
+        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
+            gbyVarExpr.getExpr().accept(this, freeVars);
+        }
+        for (GbyVariableExpressionPair decorVarExpr : gc.getDecorPairList()) {
+            decorVarExpr.getExpr().accept(this, freeVars);
+        }
+        if (gc.hasGroupFieldList()) {
+            for (Pair<Expression, Identifier> groupField : gc.getGroupFieldList()) {
+                groupField.first.accept(this, freeVars);
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(LimitClause limitClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        limitClause.getLimitExpr().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(LetClause letClause, Collection<VariableExpr> freeVars) throws AsterixException {
+        letClause.getBindingExpr().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectExpression selectExpression, Collection<VariableExpr> freeVars) throws AsterixException {
+        Collection<VariableExpr> letsFreeVars = new HashSet<>();
+        Collection<VariableExpr> selectFreeVars = new HashSet<>();
+        visitLetClauses(selectExpression.getLetList(), letsFreeVars);
+
+        // visit order by
+        if (selectExpression.hasOrderby()) {
+            for (Expression orderExpr : selectExpression.getOrderbyClause().getOrderbyList()) {
+                orderExpr.accept(this, selectFreeVars);
+            }
+        }
+
+        // visit limit
+        if (selectExpression.hasLimit()) {
+            selectExpression.getLimitClause().accept(this, selectFreeVars);
+        }
+
+        // visit the main select
+        selectExpression.getSelectSetOperation().accept(this, selectFreeVars);
+
+        // Removed let binding variables.
+        selectFreeVars.removeAll(SqlppVariableUtil.getBindingVariables(selectExpression.getLetList()));
+        freeVars.addAll(letsFreeVars);
+        freeVars.addAll(selectFreeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(LiteralExpr l, Collection<VariableExpr> freeVars) throws AsterixException {
+        return null;
+    }
+
+    @Override
+    public Void visit(ListConstructor lc, Collection<VariableExpr> freeVars) throws AsterixException {
+        for (Expression expr : lc.getExprList()) {
+            expr.accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(RecordConstructor rc, Collection<VariableExpr> freeVars) throws AsterixException {
+        for (FieldBinding binding : rc.getFbList()) {
+            binding.getLeftExpr().accept(this, freeVars);
+            binding.getRightExpr().accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(OperatorExpr operatorExpr, Collection<VariableExpr> freeVars) throws AsterixException {
+        for (Expression expr : operatorExpr.getExprList()) {
+            expr.accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(IfExpr ifExpr, Collection<VariableExpr> freeVars) throws AsterixException {
+        ifExpr.getCondExpr().accept(this, freeVars);
+        ifExpr.getThenExpr().accept(this, freeVars);
+        ifExpr.getElseExpr().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(QuantifiedExpression qe, Collection<VariableExpr> freeVars) throws AsterixException {
+        for (QuantifiedPair pair : qe.getQuantifiedList()) {
+            pair.getExpr().accept(this, freeVars);
+        }
+        qe.getSatisfiesExpr().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(CallExpr callExpr, Collection<VariableExpr> freeVars) throws AsterixException {
+        for (Expression expr : callExpr.getExprList()) {
+            expr.accept(this, freeVars);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(VariableExpr varExpr, Collection<VariableExpr> freeVars) throws AsterixException {
+        freeVars.add(varExpr);
+        return null;
+    }
+
+    @Override
+    public Void visit(UnaryExpr u, Collection<VariableExpr> freeVars) throws AsterixException {
+        u.getExpr().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(FieldAccessor fa, Collection<VariableExpr> freeVars) throws AsterixException {
+        fa.getExpr().accept(this, freeVars);
+        return null;
+    }
+
+    @Override
+    public Void visit(IndexAccessor ia, Collection<VariableExpr> freeVars) throws AsterixException {
+        ia.getExpr().accept(this, freeVars);
+        if (ia.getIndexExpr() != null) {
+            ia.getIndexExpr();
+        }
+        return null;
+    }
+
+    private void visitLetClauses(List<LetClause> letClauses, Collection<VariableExpr> freeVars)
+            throws AsterixException {
+        if (letClauses == null || letClauses.isEmpty()) {
+            return;
+        }
+        Collection<VariableExpr> bindingVars = new HashSet<>();
+        for (LetClause letClause : letClauses) {
+            Collection<VariableExpr> letFreeVars = new HashSet<>();
+            letClause.accept(this, letFreeVars);
+
+            // Removes previous binding variables.
+            letFreeVars.removeAll(bindingVars);
+            freeVars.addAll(letFreeVars);
+
+            // Adds let binding variables into the binding variable collection.
+            bindingVars.add(letClause.getVarExpr());
+        }
+    }
+
+    private void visitJoinAndNest(AbstractBinaryCorrelateClause clause, Expression condition,
+            Collection<VariableExpr> freeVars) throws AsterixException {
+        clause.getRightExpression().accept(this, freeVars);
+        Collection<VariableExpr> conditionFreeVars = new HashSet<>();
+        condition.accept(this, freeVars);
+
+        // The condition expression can free binding variables defined in the join clause.
+        conditionFreeVars.remove(clause.getRightVariable());
+        if (clause.hasPositionalVariable()) {
+            conditionFreeVars.remove(clause.getPositionalVariable());
+        }
+        freeVars.addAll(conditionFreeVars);
+    }
+
+    /**
+     * Removes all binding variables defined in the select block for a free variable collection.
+     *
+     * @param freeVars,
+     *            free variables.
+     * @param fromBindingVars,
+     *            binding variables defined in the from clause of a select block.
+     * @param letsBindingVars,
+     *            binding variables defined in the let clauses of the select block.
+     * @param gbyLetsBindingVars,
+     *            binding variables defined in the let clauses after a group-by in the select block.
+     */
+    private void removeAllBindingVarsInSelectBlock(Collection<VariableExpr> selectFreeVars,
+            Collection<VariableExpr> fromBindingVars, Collection<VariableExpr> letsBindingVars,
+            Collection<VariableExpr> gbyLetsBindingVars) {
+        selectFreeVars.removeAll(fromBindingVars);
+        selectFreeVars.removeAll(letsBindingVars);
+        selectFreeVars.removeAll(gbyLetsBindingVars);
+        selectFreeVars.removeAll(gbyLetsBindingVars);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
new file mode 100644
index 0000000..14e80d9
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
@@ -0,0 +1,284 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.visitor.base;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.clause.LimitClause;
+import org.apache.asterix.lang.common.context.Scope;
+import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
+import org.apache.asterix.lang.common.expression.QuantifiedExpression;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.parser.ScopeChecker;
+import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
+import org.apache.asterix.lang.common.statement.FunctionDecl;
+import org.apache.asterix.lang.common.statement.Query;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.lang.common.struct.QuantifiedPair;
+import org.apache.asterix.lang.common.struct.VarIdentifier;
+import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.JoinClause;
+import org.apache.asterix.lang.sqlpp.clause.NestClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
+import org.apache.hyracks.algebricks.core.algebra.base.Counter;
+
/**
 * Walks a SQL++ AST while maintaining variable scopes via a {@link ScopeChecker},
 * so that each {@link VariableExpr} can be resolved to the variable definition it
 * references. Subclasses inherit correct scoping behavior for from/join/let/
 * group-by/quantified constructs.
 *
 * NOTE(review): the exact order of createNewScope/removeCurrentScope/merge calls
 * is load-bearing; do not reorder statements in these methods.
 */
public class AbstractSqlppExpressionScopingVisitor extends AbstractSqlppSimpleExpressionVisitor {

    // Tracks the stack of variable scopes while the AST is traversed.
    protected final ScopeChecker scopeChecker = new ScopeChecker();
    // Shared rewriting context; kept in sync with the scope checker's variable counter.
    protected final LangRewritingContext context;

    /**
     * @param context,
     *            manages ids of variables and guarantees uniqueness of variables.
     */
    public AbstractSqlppExpressionScopingVisitor(LangRewritingContext context) {
        this.context = context;
        this.scopeChecker.setVarCounter(new Counter(context.getVarCounter()));
    }

    @Override
    public Expression visit(FromClause fromClause, Expression arg) throws AsterixException {
        // Extends the current scope so that the from terms' binding variables
        // become visible to the clauses that follow the from clause.
        scopeChecker.extendCurrentScope();
        for (FromTerm fromTerm : fromClause.getFromTerms()) {
            fromTerm.accept(this, arg);
        }
        return null;
    }

    @Override
    public Expression visit(FromTerm fromTerm, Expression arg) throws AsterixException {
        scopeChecker.createNewScope();
        // Visit the left expression of a from term.
        fromTerm.setLeftExpression(fromTerm.getLeftExpression().accept(this, arg));

        // Registers the data item variable.
        VariableExpr leftVar = fromTerm.getLeftVariable();
        scopeChecker.getCurrentScope().addNewVarSymbolToScope(leftVar.getVar());

        // Registers the positional variable
        if (fromTerm.hasPositionalVariable()) {
            VariableExpr posVar = fromTerm.getPositionalVariable();
            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
        }
        // Visits join/unnest/nest clauses.
        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
            correlateClause.accept(this, arg);
        }
        return null;
    }

    @Override
    public Expression visit(JoinClause joinClause, Expression arg) throws AsterixException {
        // The left branch's scope is set aside so the right branch cannot see it,
        // then both are merged for the condition expression.
        Scope backupScope = scopeChecker.removeCurrentScope();
        Scope parentScope = scopeChecker.getCurrentScope();
        scopeChecker.createNewScope();
        // NOTE: the two join branches cannot be correlated, instead of checking
        // the correlation here,
        // we defer the check to the query optimizer.
        joinClause.setRightExpression(joinClause.getRightExpression().accept(this, arg));

        // Registers the data item variable.
        VariableExpr rightVar = joinClause.getRightVariable();
        scopeChecker.getCurrentScope().addNewVarSymbolToScope(rightVar.getVar());

        if (joinClause.hasPositionalVariable()) {
            // Registers the positional variable.
            VariableExpr posVar = joinClause.getPositionalVariable();
            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
        }

        // Merges the left-branch scope and the right-branch scope so that the
        // join condition can see variables bound on either side.
        Scope rightScope = scopeChecker.removeCurrentScope();
        Scope mergedScope = new Scope(scopeChecker, parentScope);
        mergedScope.merge(backupScope);
        mergedScope.merge(rightScope);
        scopeChecker.pushExistingScope(mergedScope);
        // The condition expression can refer to the just registered variables
        // for the right branch.
        joinClause.setConditionExpression(joinClause.getConditionExpression().accept(this, arg));
        return null;
    }

    @Override
    public Expression visit(NestClause nestClause, Expression arg) throws AsterixException {
        // NOTE: the two branches of a NEST cannot be correlated, instead of
        // checking the correlation here, we defer the check to the query
        // optimizer.
        nestClause.setRightExpression(nestClause.getRightExpression().accept(this, arg));

        // Registers the data item variable.
        VariableExpr rightVar = nestClause.getRightVariable();
        scopeChecker.getCurrentScope().addNewVarSymbolToScope(rightVar.getVar());

        if (nestClause.hasPositionalVariable()) {
            // Registers the positional variable.
            VariableExpr posVar = nestClause.getPositionalVariable();
            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
        }

        // The condition expression can refer to the just registered variables
        // for the right branch.
        nestClause.setConditionExpression(nestClause.getConditionExpression().accept(this, arg));
        return null;
    }

    @Override
    public Expression visit(UnnestClause unnestClause, Expression arg) throws AsterixException {
        // Unlike JOIN/NEST, an UNNEST right expression may be correlated with the
        // current scope, so no scope juggling is needed here.
        unnestClause.setRightExpression(unnestClause.getRightExpression().accept(this, arg));

        // register the data item variable
        VariableExpr rightVar = unnestClause.getRightVariable();
        scopeChecker.getCurrentScope().addNewVarSymbolToScope(rightVar.getVar());

        if (unnestClause.hasPositionalVariable()) {
            // register the positional variable
            VariableExpr posVar = unnestClause.getPositionalVariable();
            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
        }
        return null;
    }

    @Override
    public Expression visit(SelectSetOperation selectSetOperation, Expression arg) throws AsterixException {
        selectSetOperation.getLeftInput().accept(this, arg);
        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
            // Each set-operation branch gets its own scope; branches cannot see
            // each other's variables.
            scopeChecker.createNewScope();
            right.getSetOperationRightInput().accept(this, arg);
        }
        return null;
    }

    @Override
    public Expression visit(Query q, Expression arg) throws AsterixException {
        q.setBody(q.getBody().accept(this, arg));
        // Propagates the variable counter so later rewrites keep generating
        // unique variable ids.
        q.setVarCounter(scopeChecker.getVarCounter());
        context.setVarCounter(scopeChecker.getVarCounter());
        return null;
    }

    @Override
    public Expression visit(FunctionDecl fd, Expression arg) throws AsterixException {
        // A function body is scoped on its own and must not leak symbols.
        scopeChecker.createNewScope();
        fd.setFuncBody(fd.getFuncBody().accept(this, arg));
        scopeChecker.removeCurrentScope();
        return null;
    }

    @Override
    public Expression visit(GroupbyClause gc, Expression arg) throws AsterixException {
        // The new scope is built without pushing it, so that the group-by
        // expressions themselves are resolved against the pre-group-by scope.
        Scope newScope = scopeChecker.extendCurrentScopeNoPush(true);
        // Puts all group-by variables into the symbol set of the new scope.
        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
            gbyVarExpr.setExpr(gbyVarExpr.getExpr().accept(this, arg));
            VariableExpr gbyVar = gbyVarExpr.getVar();
            if (gbyVar != null) {
                newScope.addNewVarSymbolToScope(gbyVarExpr.getVar().getVar());
            }
        }
        for (VariableExpr withVar : gc.getWithVarList()) {
            newScope.addNewVarSymbolToScope(withVar.getVar());
        }
        // Only now does the post-group-by scope take effect.
        scopeChecker.replaceCurrentScope(newScope);
        return null;
    }

    @Override
    public Expression visit(LimitClause limitClause, Expression arg) throws AsterixException {
        // Variables of the current scope may not be referenced inside a limit
        // expression; marking the scope forbidden makes such references fail.
        scopeChecker.pushForbiddenScope(scopeChecker.getCurrentScope());
        limitClause.setLimitExpr(limitClause.getLimitExpr().accept(this, arg));
        scopeChecker.popForbiddenScope();
        return null;
    }

    @Override
    public Expression visit(LetClause letClause, Expression arg) throws AsterixException {
        // The binding expression is visited before the let variable is added,
        // so a let cannot reference itself.
        scopeChecker.extendCurrentScope();
        letClause.setBindingExpr(letClause.getBindingExpr().accept(this, arg));
        scopeChecker.getCurrentScope().addNewVarSymbolToScope(letClause.getVarExpr().getVar());
        return null;
    }

    @Override
    public Expression visit(SelectExpression selectExpression, Expression arg) throws AsterixException {
        // Remembers where the scope stack stood, so every scope opened inside
        // this select expression can be popped before returning.
        Scope scopeBeforeSelectExpression = scopeChecker.getCurrentScope();
        scopeChecker.createNewScope();

        // visit let list
        if (selectExpression.hasLetClauses()) {
            for (LetClause letClause : selectExpression.getLetList()) {
                letClause.accept(this, arg);
            }
        }

        // visit the main select.
        selectExpression.getSelectSetOperation().accept(this, selectExpression);

        // visit order by
        if (selectExpression.hasOrderby()) {
            selectExpression.getOrderbyClause().accept(this, arg);
        }

        // visit limit
        if (selectExpression.hasLimit()) {
            selectExpression.getLimitClause().accept(this, arg);
        }

        // Exit scopes that were entered within this select expression
        while (scopeChecker.getCurrentScope() != scopeBeforeSelectExpression) {
            scopeChecker.removeCurrentScope();
        }
        return selectExpression;
    }

    @Override
    public Expression visit(QuantifiedExpression qe, Expression arg) throws AsterixException {
        // Quantified variables are visible only inside the quantified expression.
        scopeChecker.createNewScope();
        for (QuantifiedPair pair : qe.getQuantifiedList()) {
            scopeChecker.getCurrentScope().addNewVarSymbolToScope(pair.getVarExpr().getVar());
            pair.setExpr(pair.getExpr().accept(this, arg));
        }
        qe.setSatisfiesExpr(qe.getSatisfiesExpr().accept(this, arg));
        scopeChecker.removeCurrentScope();
        return qe;
    }

    @Override
    public Expression visit(VariableExpr varExpr, Expression arg) throws AsterixException {
        String varName = varExpr.getVar().getValue();
        if (scopeChecker.isInForbiddenScopes(varName)) {
            throw new AsterixException(
                    "Inside limit clauses, it is disallowed to reference a variable having the same name as any variable bound in the same scope as the limit clause.");
        }
        Identifier ident = scopeChecker.lookupSymbol(varName);
        if (ident != null) {
            // Exists such an identifier, then this is a variable reference instead of a variable
            // definition.
            varExpr.setIsNewVar(false);
            varExpr.setVar((VarIdentifier) ident);
        }
        return varExpr;
    }

}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java
new file mode 100644
index 0000000..18c2789
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java
@@ -0,0 +1,347 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.visitor.base;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.clause.LimitClause;
+import org.apache.asterix.lang.common.clause.OrderbyClause;
+import org.apache.asterix.lang.common.clause.WhereClause;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.common.expression.FieldAccessor;
+import org.apache.asterix.lang.common.expression.FieldBinding;
+import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
+import org.apache.asterix.lang.common.expression.IfExpr;
+import org.apache.asterix.lang.common.expression.IndexAccessor;
+import org.apache.asterix.lang.common.expression.ListConstructor;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.OperatorExpr;
+import org.apache.asterix.lang.common.expression.QuantifiedExpression;
+import org.apache.asterix.lang.common.expression.RecordConstructor;
+import org.apache.asterix.lang.common.expression.UnaryExpr;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.statement.FunctionDecl;
+import org.apache.asterix.lang.common.statement.Query;
+import org.apache.asterix.lang.common.struct.QuantifiedPair;
+import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.HavingClause;
+import org.apache.asterix.lang.sqlpp.clause.JoinClause;
+import org.apache.asterix.lang.sqlpp.clause.NestClause;
+import org.apache.asterix.lang.sqlpp.clause.Projection;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectElement;
+import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
+
+public class AbstractSqlppSimpleExpressionVisitor extends AbstractSqlppQueryExpressionVisitor<Expression, Expression> {
+
+    @Override
+    public Expression visit(FromClause fromClause, Expression arg) throws AsterixException {
+        for (FromTerm fromTerm : fromClause.getFromTerms()) {
+            fromTerm.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Expression visit(FromTerm fromTerm, Expression arg) throws AsterixException {
+        // Visit the left expression of a from term.
+        fromTerm.setLeftExpression(fromTerm.getLeftExpression().accept(this, arg));
+
+        // Visits join/unnest/nest clauses.
+        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
+            correlateClause.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Expression visit(JoinClause joinClause, Expression arg) throws AsterixException {
+        joinClause.setRightExpression(joinClause.getRightExpression().accept(this, arg));
+        joinClause.setConditionExpression(joinClause.getConditionExpression().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(NestClause nestClause, Expression arg) throws AsterixException {
+        nestClause.setRightExpression(nestClause.getRightExpression().accept(this, arg));
+        nestClause.setConditionExpression(nestClause.getConditionExpression().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(UnnestClause unnestClause, Expression arg) throws AsterixException {
+        unnestClause.setRightExpression(unnestClause.getRightExpression().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(Projection projection, Expression arg) throws AsterixException {
+        projection.setExpression(projection.getExpression().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(SelectBlock selectBlock, Expression arg) throws AsterixException {
+        // Traverses the select block in the order of "from", "let"s, "where",
+        // "group by", "let"s, "having" and "select".
+        if (selectBlock.hasFromClause()) {
+            selectBlock.getFromClause().accept(this, arg);
+        }
+        if (selectBlock.hasLetClauses()) {
+            List<LetClause> letList = selectBlock.getLetList();
+            for (LetClause letClause : letList) {
+                letClause.accept(this, arg);
+            }
+        }
+        if (selectBlock.hasWhereClause()) {
+            selectBlock.getWhereClause().accept(this, arg);
+        }
+        if (selectBlock.hasGroupbyClause()) {
+            selectBlock.getGroupbyClause().accept(this, arg);
+        }
+        if (selectBlock.hasLetClausesAfterGroupby()) {
+            List<LetClause> letListAfterGby = selectBlock.getLetListAfterGroupby();
+            for (LetClause letClauseAfterGby : letListAfterGby) {
+                letClauseAfterGby.accept(this, arg);
+            }
+        }
+        if (selectBlock.hasHavingClause()) {
+            selectBlock.getHavingClause().accept(this, arg);
+        }
+        selectBlock.getSelectClause().accept(this, arg);
+        return null;
+    }
+
+    @Override
+    public Expression visit(SelectClause selectClause, Expression arg) throws AsterixException {
+        if (selectClause.selectElement()) {
+            selectClause.getSelectElement().accept(this, arg);
+        }
+        if (selectClause.selectRegular()) {
+            selectClause.getSelectRegular().accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Expression visit(SelectElement selectElement, Expression arg) throws AsterixException {
+        selectElement.setExpression(selectElement.getExpression().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(SelectRegular selectRegular, Expression arg) throws AsterixException {
+        for (Projection projection : selectRegular.getProjections()) {
+            projection.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Expression visit(SelectSetOperation selectSetOperation, Expression arg) throws AsterixException {
+        selectSetOperation.getLeftInput().accept(this, arg);
+        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
+            right.getSetOperationRightInput().accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Expression visit(HavingClause havingClause, Expression arg) throws AsterixException {
+        havingClause.setFilterExpression(havingClause.getFilterExpression().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(Query q, Expression arg) throws AsterixException {
+        q.setBody(q.getBody().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(FunctionDecl fd, Expression arg) throws AsterixException {
+        fd.setFuncBody(fd.getFuncBody().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(WhereClause whereClause, Expression arg) throws AsterixException {
+        whereClause.setWhereExpr(whereClause.getWhereExpr().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(OrderbyClause oc, Expression arg) throws AsterixException {
+        List<Expression> newOrderbyList = new ArrayList<Expression>();
+        for (Expression orderExpr : oc.getOrderbyList()) {
+            newOrderbyList.add(orderExpr.accept(this, arg));
+        }
+        oc.setOrderbyList(newOrderbyList);
+        return null;
+    }
+
+    @Override
+    public Expression visit(GroupbyClause gc, Expression arg) throws AsterixException {
+        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
+            gbyVarExpr.setExpr(gbyVarExpr.getExpr().accept(this, arg));
+        }
+        return null;
+    }
+
+    @Override
+    public Expression visit(LimitClause limitClause, Expression arg) throws AsterixException {
+        limitClause.setLimitExpr(limitClause.getLimitExpr().accept(this, arg));
+        if (limitClause.hasOffset()) {
+            limitClause.setOffset(limitClause.getOffset().accept(this, arg));
+        }
+        return null;
+    }
+
+    @Override
+    public Expression visit(LetClause letClause, Expression arg) throws AsterixException {
+        letClause.setBindingExpr(letClause.getBindingExpr().accept(this, arg));
+        return null;
+    }
+
+    @Override
+    public Expression visit(SelectExpression selectExpression, Expression arg) throws AsterixException {
+        // visit let list
+        if (selectExpression.hasLetClauses()) {
+            for (LetClause letClause : selectExpression.getLetList()) {
+                letClause.accept(this, arg);
+            }
+        }
+
+        // visit the main select.
+        selectExpression.getSelectSetOperation().accept(this, arg);
+
+        // visit order by
+        if (selectExpression.hasOrderby()) {
+            for (Expression orderExpr : selectExpression.getOrderbyClause().getOrderbyList()) {
+                orderExpr.accept(this, arg);
+            }
+        }
+
+        // visit limit
+        if (selectExpression.hasLimit()) {
+            selectExpression.getLimitClause().accept(this, arg);
+        }
+        return selectExpression;
+    }
+
+    @Override
+    public Expression visit(LiteralExpr l, Expression arg) throws AsterixException {
+        return l;
+    }
+
+    @Override
+    public Expression visit(ListConstructor lc, Expression arg) throws AsterixException {
+        List<Expression> newExprList = new ArrayList<Expression>();
+        for (Expression expr : lc.getExprList()) {
+            newExprList.add(expr.accept(this, arg));
+        }
+        lc.setExprList(newExprList);
+        return lc;
+    }
+
+    @Override
+    public Expression visit(RecordConstructor rc, Expression arg) throws AsterixException {
+        for (FieldBinding binding : rc.getFbList()) {
+            binding.setLeftExpr(binding.getLeftExpr().accept(this, arg));
+            binding.setRightExpr(binding.getRightExpr().accept(this, arg));
+        }
+        return rc;
+    }
+
+    @Override
+    public Expression visit(OperatorExpr operatorExpr, Expression arg) throws AsterixException {
+        List<Expression> newExprList = new ArrayList<Expression>();
+        for (Expression expr : operatorExpr.getExprList()) {
+            newExprList.add(expr.accept(this, arg));
+        }
+        operatorExpr.setExprList(newExprList);
+        return operatorExpr;
+    }
+
+    @Override
+    public Expression visit(IfExpr ifExpr, Expression arg) throws AsterixException {
+        ifExpr.setCondExpr(ifExpr.getCondExpr().accept(this, arg));
+        ifExpr.setThenExpr(ifExpr.getThenExpr().accept(this, arg));
+        ifExpr.setElseExpr(ifExpr.getElseExpr().accept(this, arg));
+        return ifExpr;
+    }
+
+    @Override
+    public Expression visit(QuantifiedExpression qe, Expression arg) throws AsterixException {
+        for (QuantifiedPair pair : qe.getQuantifiedList()) {
+            pair.setExpr(pair.getExpr().accept(this, arg));
+        }
+        qe.setSatisfiesExpr(qe.getSatisfiesExpr().accept(this, arg));
+        return qe;
+    }
+
+    @Override
+    public Expression visit(CallExpr callExpr, Expression arg) throws AsterixException {
+        List<Expression> newExprList = new ArrayList<Expression>();
+        for (Expression expr : callExpr.getExprList()) {
+            newExprList.add(expr.accept(this, arg));
+        }
+        callExpr.setExprList(newExprList);
+        return callExpr;
+    }
+
+    @Override
+    public Expression visit(VariableExpr varExpr, Expression arg) throws AsterixException {
+        return varExpr;
+    }
+
+    @Override
+    public Expression visit(UnaryExpr u, Expression arg) throws AsterixException {
+        u.setExpr(u.getExpr().accept(this, arg));
+        return u;
+    }
+
+    @Override
+    public Expression visit(FieldAccessor fa, Expression arg) throws AsterixException {
+        fa.setExpr(fa.getExpr().accept(this, arg));
+        return fa;
+    }
+
+    @Override
+    public Expression visit(IndexAccessor ia, Expression arg) throws AsterixException {
+        ia.setExpr(ia.getExpr().accept(this, arg));
+        if (ia.getIndexExpr() != null) {
+            ia.setIndexExpr(ia.getIndexExpr());
+        }
+        return ia;
+    }
+
+}



[18/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
index f667bd8,0000000..c5f6915
mode 100644,000000..100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
@@@ -1,762 -1,0 +1,771 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.common.context;
 +
 +import java.io.IOException;
 +import java.io.OutputStream;
 +import java.util.ArrayList;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +
 +import org.apache.asterix.common.api.IDatasetLifecycleManager;
 +import org.apache.asterix.common.api.ILocalResourceMetadata;
 +import org.apache.asterix.common.config.AsterixStorageProperties;
 +import org.apache.asterix.common.exceptions.ACIDException;
 +import org.apache.asterix.common.ioopcallbacks.AbstractLSMIOOperationCallback;
 +import org.apache.asterix.common.transactions.ILogManager;
 +import org.apache.asterix.common.transactions.LogRecord;
 +import org.apache.asterix.common.utils.TransactionUtil;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.lifecycle.ILifeCycleComponent;
 +import org.apache.hyracks.storage.am.common.api.IIndex;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
 +import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
 +import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex;
 +import org.apache.hyracks.storage.am.lsm.common.impls.MultitenantVirtualBufferCache;
 +import org.apache.hyracks.storage.am.lsm.common.impls.VirtualBufferCache;
 +import org.apache.hyracks.storage.common.buffercache.ResourceHeapBufferAllocator;
 +import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
 +import org.apache.hyracks.storage.common.file.LocalResource;
 +
 +public class DatasetLifecycleManager implements IDatasetLifecycleManager, ILifeCycleComponent {
 +    private final AsterixStorageProperties storageProperties;
 +    private final Map<Integer, DatasetVirtualBufferCaches> datasetVirtualBufferCachesMap;
 +    private final Map<Integer, ILSMOperationTracker> datasetOpTrackers;
 +    private final Map<Integer, DatasetInfo> datasetInfos;
 +    private final ILocalResourceRepository resourceRepository;
 +    private final int firstAvilableUserDatasetID;
 +    private final long capacity;
 +    private long used;
 +    private final ILogManager logManager;
 +    private final LogRecord logRecord;
 +    private final int numPartitions;
 +
    /**
     * Creates a lifecycle manager for all datasets on this node.
     *
     * @param storageProperties provides the global memory-component budget used as this cache's capacity
     * @param resourceRepository maps resource paths to local resources (dataset id / resource id)
     * @param firstAvilableUserDatasetID smallest dataset id belonging to a user dataset; lower ids are
     *            metadata datasets, which are never chosen for eviction
     * @param logManager log manager handed to per-dataset primary-index operation trackers
     * @param numPartitions number of partitions hosted by this node
     */
    public DatasetLifecycleManager(AsterixStorageProperties storageProperties,
            ILocalResourceRepository resourceRepository, int firstAvilableUserDatasetID, ILogManager logManager,
            int numPartitions) {
        this.logManager = logManager;
        this.storageProperties = storageProperties;
        this.resourceRepository = resourceRepository;
        this.firstAvilableUserDatasetID = firstAvilableUserDatasetID;
        this.numPartitions = numPartitions;
        datasetVirtualBufferCachesMap = new HashMap<>();
        datasetOpTrackers = new HashMap<Integer, ILSMOperationTracker>();
        datasetInfos = new HashMap<Integer, DatasetInfo>();
        // Budget accounting: 'capacity' is fixed; 'used' grows/shrinks as dataset memory is (de)allocated.
        capacity = storageProperties.getMemoryComponentGlobalBudget();
        used = 0;
        logRecord = new LogRecord();
    }
 +
 +    @Override
 +    public synchronized IIndex getIndex(String resourcePath) throws HyracksDataException {
 +        int datasetID = getDIDfromResourcePath(resourcePath);
 +        long resourceID = getResourceIDfromResourcePath(resourcePath);
 +        return getIndex(datasetID, resourceID);
 +    }
 +
 +    @Override
 +    public synchronized IIndex getIndex(int datasetID, long resourceID) throws HyracksDataException {
 +        DatasetInfo dsInfo = datasetInfos.get(datasetID);
 +        if (dsInfo == null) {
 +            return null;
 +        }
 +        IndexInfo iInfo = dsInfo.indexes.get(resourceID);
 +        if (iInfo == null) {
 +            return null;
 +        }
 +        return iInfo.index;
 +    }
 +
    /**
     * Registers {@code index} under its resource path, creating the owning
     * {@link DatasetInfo} on first registration for that dataset.
     *
     * @throws HyracksDataException if an index with the same resource id is already registered
     */
    @Override
    public synchronized void register(String resourcePath, IIndex index) throws HyracksDataException {
        int did = getDIDfromResourcePath(resourcePath);
        long resourceID = getResourceIDfromResourcePath(resourcePath);
        DatasetInfo dsInfo = datasetInfos.get(did);
        if (dsInfo == null) {
            // getDatasetInfo creates and caches the entry if missing.
            dsInfo = getDatasetInfo(did);
        }
        if (!dsInfo.isRegistered) {
            // First index of this dataset decides dataset-wide flags:
            // external datasets have no memory components; durability comes from the LSM index.
            dsInfo.isExternal = !index.hasMemoryComponents();
            dsInfo.isRegistered = true;
            dsInfo.durable = ((ILSMIndex) index).isDurable();
        }

        if (dsInfo.indexes.containsKey(resourceID)) {
            throw new HyracksDataException("Index with resource ID " + resourceID + " already exists.");
        }
        dsInfo.indexes.put(resourceID, new IndexInfo((ILSMIndex) index, dsInfo.datasetID, resourceID));
    }
 +
 +    public int getDIDfromResourcePath(String resourcePath) throws HyracksDataException {
 +        LocalResource lr = resourceRepository.getResourceByPath(resourcePath);
 +        if (lr == null) {
 +            return -1;
 +        }
 +        return ((ILocalResourceMetadata) lr.getResourceObject()).getDatasetID();
 +    }
 +
 +    public long getResourceIDfromResourcePath(String resourcePath) throws HyracksDataException {
 +        LocalResource lr = resourceRepository.getResourceByPath(resourcePath);
 +        if (lr == null) {
 +            return -1;
 +        }
 +        return lr.getResourceId();
 +    }
 +
 +    @Override
 +    public synchronized void unregister(String resourcePath) throws HyracksDataException {
 +        int did = getDIDfromResourcePath(resourcePath);
 +        long resourceID = getResourceIDfromResourcePath(resourcePath);
 +
 +        DatasetInfo dsInfo = datasetInfos.get(did);
 +        IndexInfo iInfo = dsInfo == null ? null : dsInfo.indexes.get(resourceID);
 +
 +        if (dsInfo == null || iInfo == null) {
 +            throw new HyracksDataException("Index with resource ID " + resourceID + " does not exist.");
 +        }
 +
 +        PrimaryIndexOperationTracker opTracker = (PrimaryIndexOperationTracker) datasetOpTrackers.get(dsInfo.datasetID);
 +        if (iInfo.referenceCount != 0 || (opTracker != null && opTracker.getNumActiveOperations() != 0)) {
 +            throw new HyracksDataException("Cannot remove index while it is open. (Dataset reference count = "
 +                    + iInfo.referenceCount + ", Operation tracker number of active operations = "
 +                    + opTracker.getNumActiveOperations() + ")");
 +        }
 +
 +        // TODO: use fine-grained counters, one for each index instead of a single counter per dataset.
 +        // First wait for any ongoing IO operations
 +        synchronized (dsInfo) {
 +            while (dsInfo.numActiveIOOps > 0) {
 +                try {
 +                    //notification will come from DatasetInfo class (undeclareActiveIOOperation)
 +                    dsInfo.wait();
 +                } catch (InterruptedException e) {
 +                    throw new HyracksDataException(e);
 +                }
 +            }
 +        }
 +
 +        // Flush and wait for it to finish, it is separated from the above wait so they don't deadlock each other.
 +        // TODO: Find a better way to do this.
 +        flushAndWaitForIO(dsInfo, iInfo);
 +
 +        if (iInfo.isOpen) {
 +            ILSMOperationTracker indexOpTracker = iInfo.index.getOperationTracker();
 +            synchronized (indexOpTracker) {
 +                iInfo.index.deactivate(false);
 +            }
 +        }
 +
 +        dsInfo.indexes.remove(resourceID);
 +        if (dsInfo.referenceCount == 0 && dsInfo.isOpen && dsInfo.indexes.isEmpty() && !dsInfo.isExternal) {
 +            removeDatasetFromCache(dsInfo.datasetID);
 +        }
 +    }
 +
    /**
     * Opens (activates) the index identified by {@code resourcePath}, lazily
     * creating the dataset's virtual buffer caches on its first open, and bumps
     * the reference counts of both the dataset and the index.
     *
     * @throws HyracksDataException if the dataset or index was never registered
     */
    @Override
    public synchronized void open(String resourcePath) throws HyracksDataException {
        int did = getDIDfromResourcePath(resourcePath);
        long resourceID = getResourceIDfromResourcePath(resourcePath);

        DatasetInfo dsInfo = datasetInfos.get(did);
        if (dsInfo == null || !dsInfo.isRegistered) {
            throw new HyracksDataException(
                    "Failed to open index with resource ID " + resourceID + " since it does not exist.");
        }

        IndexInfo iInfo = dsInfo.indexes.get(resourceID);
        if (iInfo == null) {
            throw new HyracksDataException(
                    "Failed to open index with resource ID " + resourceID + " since it does not exist.");
        }
        // External datasets have no memory components, so no virtual buffer caches are needed.
        if (!dsInfo.isOpen && !dsInfo.isExternal) {
            initializeDatasetVirtualBufferCache(did);
        }

        dsInfo.isOpen = true;
        dsInfo.touch();
        if (!iInfo.isOpen) {
            // Activation must be serialized through the index's operation tracker.
            ILSMOperationTracker opTracker = iInfo.index.getOperationTracker();
            synchronized (opTracker) {
                iInfo.index.activate();
            }
            iInfo.isOpen = true;
        }
        iInfo.touch();
    }
 +
    /**
     * Tries to close one evictable dataset to free memory.
     *
     * @return true if a dataset was closed, false if no candidate was found
     */
    private boolean evictCandidateDataset() throws HyracksDataException {
        /**
         * We will take a dataset that has no active transactions, it is open (a dataset consuming memory),
         * that is not being used (refcount == 0) and has been least recently used, excluding metadata datasets.
         * The sort order defined for DatasetInfo maintains this. See DatasetInfo.compareTo().
         */
        List<DatasetInfo> datasetInfosList = new ArrayList<DatasetInfo>(datasetInfos.values());
        Collections.sort(datasetInfosList);
        for (DatasetInfo dsInfo : datasetInfosList) {
            PrimaryIndexOperationTracker opTracker = (PrimaryIndexOperationTracker) datasetOpTrackers
                    .get(dsInfo.datasetID);
            // datasetID >= firstAvilableUserDatasetID excludes metadata datasets from eviction.
            if (opTracker != null && opTracker.getNumActiveOperations() == 0 && dsInfo.referenceCount == 0
                    && dsInfo.isOpen && dsInfo.datasetID >= firstAvilableUserDatasetID) {
                closeDataset(dsInfo);
                return true;
            }
        }
        return false;
    }
 +
 +    private static void flushAndWaitForIO(DatasetInfo dsInfo, IndexInfo iInfo) throws HyracksDataException {
 +        if (iInfo.isOpen) {
 +            ILSMIndexAccessor accessor = iInfo.index.createAccessor(NoOpOperationCallback.INSTANCE,
 +                    NoOpOperationCallback.INSTANCE);
 +            accessor.scheduleFlush(iInfo.index.getIOOperationCallback());
 +        }
 +
 +        // Wait for the above flush op.
 +        synchronized (dsInfo) {
 +            while (dsInfo.numActiveIOOps > 0) {
 +                try {
 +                    //notification will come from DatasetInfo class (undeclareActiveIOOperation)
 +                    dsInfo.wait();
 +                } catch (InterruptedException e) {
 +                    throw new HyracksDataException(e);
 +                }
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public DatasetInfo getDatasetInfo(int datasetID) {
 +        synchronized (datasetInfos) {
 +            DatasetInfo dsInfo = datasetInfos.get(datasetID);
 +            if (dsInfo == null) {
 +                dsInfo = new DatasetInfo(datasetID);
 +                datasetInfos.put(datasetID, dsInfo);
 +            }
 +            return dsInfo;
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void close(String resourcePath) throws HyracksDataException {
 +        int did = getDIDfromResourcePath(resourcePath);
 +        long resourceID = getResourceIDfromResourcePath(resourcePath);
 +
 +        DatasetInfo dsInfo = datasetInfos.get(did);
 +        if (dsInfo == null) {
 +            throw new HyracksDataException("No index found with resourceID " + resourceID);
 +        }
 +        IndexInfo iInfo = dsInfo.indexes.get(resourceID);
 +        if (iInfo == null) {
 +            throw new HyracksDataException("No index found with resourceID " + resourceID);
 +        }
 +        iInfo.untouch();
 +        dsInfo.untouch();
 +    }
 +
 +    @Override
 +    public synchronized List<IIndex> getOpenIndexes() {
 +        List<IndexInfo> openIndexesInfo = getOpenIndexesInfo();
 +        List<IIndex> openIndexes = new ArrayList<IIndex>();
 +        for (IndexInfo iInfo : openIndexesInfo) {
 +            openIndexes.add(iInfo.index);
 +        }
 +        return openIndexes;
 +    }
 +
 +    @Override
 +    public synchronized List<IndexInfo> getOpenIndexesInfo() {
 +        List<IndexInfo> openIndexesInfo = new ArrayList<IndexInfo>();
 +        for (DatasetInfo dsInfo : datasetInfos.values()) {
 +            for (IndexInfo iInfo : dsInfo.indexes.values()) {
 +                if (iInfo.isOpen) {
 +                    openIndexesInfo.add(iInfo);
 +                }
 +            }
 +        }
 +        return openIndexesInfo;
 +    }
 +
 +    private DatasetVirtualBufferCaches getVirtualBufferCaches(int datasetID) {
 +        synchronized (datasetVirtualBufferCachesMap) {
 +            DatasetVirtualBufferCaches vbcs = datasetVirtualBufferCachesMap.get(datasetID);
 +            if (vbcs == null) {
 +                vbcs = initializeDatasetVirtualBufferCache(datasetID);
 +            }
 +            return vbcs;
 +        }
 +    }
 +
 +    @Override
 +    public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID, int ioDeviceNum) {
 +        DatasetVirtualBufferCaches dvbcs = getVirtualBufferCaches(datasetID);
 +        return dvbcs.getVirtualBufferCaches(ioDeviceNum);
 +    }
 +
    /**
     * Evicts a dataset: releases its memory budget, then drops its info,
     * virtual buffer caches, and operation tracker from the caches.
     */
    private void removeDatasetFromCache(int datasetID) throws HyracksDataException {
        deallocateDatasetMemory(datasetID);
        datasetInfos.remove(datasetID);
        datasetVirtualBufferCachesMap.remove(datasetID);
        datasetOpTrackers.remove(datasetID);
    }
 +
 +    private DatasetVirtualBufferCaches initializeDatasetVirtualBufferCache(int datasetID) {
 +        synchronized (datasetVirtualBufferCachesMap) {
 +            DatasetVirtualBufferCaches dvbcs = new DatasetVirtualBufferCaches(datasetID);
 +            datasetVirtualBufferCachesMap.put(datasetID, dvbcs);
 +            return dvbcs;
 +        }
 +    }
++
 +    @Override
 +    public ILSMOperationTracker getOperationTracker(int datasetID) {
 +        synchronized (datasetOpTrackers) {
 +            ILSMOperationTracker opTracker = datasetOpTrackers.get(datasetID);
 +            if (opTracker == null) {
 +                opTracker = new PrimaryIndexOperationTracker(datasetID, logManager, getDatasetInfo(datasetID));
 +                datasetOpTrackers.put(datasetID, opTracker);
 +            }
 +            return opTracker;
 +        }
 +    }
 +
 +    private static abstract class Info {
 +        protected int referenceCount;
 +        protected boolean isOpen;
 +
 +        public Info() {
 +            referenceCount = 0;
 +            isOpen = false;
 +        }
 +
 +        public void touch() {
 +            ++referenceCount;
 +        }
 +
 +        public void untouch() {
 +            --referenceCount;
 +        }
 +    }
 +
 +    public static class IndexInfo extends Info {
 +        private final ILSMIndex index;
 +        private final long resourceId;
 +        private final int datasetId;
 +
 +        public IndexInfo(ILSMIndex index, int datasetId, long resourceId) {
 +            this.index = index;
 +            this.datasetId = datasetId;
 +            this.resourceId = resourceId;
 +        }
 +
 +        public ILSMIndex getIndex() {
 +            return index;
 +        }
 +
 +        public long getResourceId() {
 +            return resourceId;
 +        }
 +
 +        public int getDatasetId() {
 +            return datasetId;
 +        }
 +    }
 +
 +    public static class DatasetInfo extends Info implements Comparable<DatasetInfo> {
 +        private final Map<Long, IndexInfo> indexes;
 +        private final int datasetID;
 +        private long lastAccess;
 +        private int numActiveIOOps;
 +        private boolean isExternal;
 +        private boolean isRegistered;
 +        private boolean memoryAllocated;
++        private boolean durable;
 +
 +        public DatasetInfo(int datasetID) {
 +            this.indexes = new HashMap<Long, IndexInfo>();
 +            this.lastAccess = -1;
 +            this.datasetID = datasetID;
 +            this.isRegistered = false;
 +            this.memoryAllocated = false;
 +        }
 +
 +        @Override
 +        public void touch() {
 +            super.touch();
 +            lastAccess = System.currentTimeMillis();
 +        }
 +
 +        @Override
 +        public void untouch() {
 +            super.untouch();
 +            lastAccess = System.currentTimeMillis();
 +        }
 +
 +        public synchronized void declareActiveIOOperation() {
 +            numActiveIOOps++;
 +        }
 +
 +        public synchronized void undeclareActiveIOOperation() {
 +            numActiveIOOps--;
 +            //notify threads waiting on this dataset info
 +            notifyAll();
 +        }
 +
 +        public synchronized Set<ILSMIndex> getDatasetIndexes() {
 +            Set<ILSMIndex> datasetIndexes = new HashSet<ILSMIndex>();
 +            for (IndexInfo iInfo : indexes.values()) {
 +                if (iInfo.isOpen) {
 +                    datasetIndexes.add(iInfo.index);
 +                }
 +            }
 +
 +            return datasetIndexes;
 +        }
 +
 +        @Override
 +        public int compareTo(DatasetInfo i) {
 +            // sort by (isOpen, referenceCount, lastAccess) ascending, where true < false
 +            //
 +            // Example sort order:
 +            // -------------------
 +            // (F, 0, 70)       <-- largest
 +            // (F, 0, 60)
 +            // (T, 10, 80)
 +            // (T, 10, 70)
 +            // (T, 9, 90)
 +            // (T, 0, 100)      <-- smallest
 +            if (isOpen && !i.isOpen) {
 +                return -1;
 +            } else if (!isOpen && i.isOpen) {
 +                return 1;
 +            } else {
 +                if (referenceCount < i.referenceCount) {
 +                    return -1;
 +                } else if (referenceCount > i.referenceCount) {
 +                    return 1;
 +                } else {
 +                    if (lastAccess < i.lastAccess) {
 +                        return -1;
 +                    } else if (lastAccess > i.lastAccess) {
 +                        return 1;
 +                    } else {
 +                        return 0;
 +                    }
 +                }
 +            }
 +
 +        }
 +
 +        @Override
 +        public String toString() {
 +            return "DatasetID: " + datasetID + ", isOpen: " + isOpen + ", refCount: " + referenceCount
 +                    + ", lastAccess: " + lastAccess + ", isRegistered: " + isRegistered + ", memoryAllocated: "
-                     + memoryAllocated;
++                    + memoryAllocated + ", isDurable: " + durable;
++        }
++
++        public boolean isDurable() {
++            return durable;
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void start() {
 +        used = 0;
 +    }
 +
 +    @Override
 +    public synchronized void flushAllDatasets() throws HyracksDataException {
 +        for (DatasetInfo dsInfo : datasetInfos.values()) {
 +            flushDatasetOpenIndexes(dsInfo, false);
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void flushDataset(int datasetId, boolean asyncFlush) throws HyracksDataException {
 +        DatasetInfo datasetInfo = datasetInfos.get(datasetId);
 +        if (datasetInfo != null) {
 +            flushDatasetOpenIndexes(datasetInfo, asyncFlush);
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void scheduleAsyncFlushForLaggingDatasets(long targetLSN) throws HyracksDataException {
 +        //schedule flush for datasets with min LSN (Log Serial Number) < targetLSN
 +        for (DatasetInfo dsInfo : datasetInfos.values()) {
 +            PrimaryIndexOperationTracker opTracker = (PrimaryIndexOperationTracker) getOperationTracker(
 +                    dsInfo.datasetID);
 +            synchronized (opTracker) {
 +                for (IndexInfo iInfo : dsInfo.indexes.values()) {
 +                    AbstractLSMIOOperationCallback ioCallback = (AbstractLSMIOOperationCallback) iInfo.index
 +                            .getIOOperationCallback();
 +                    if (!(((AbstractLSMIndex) iInfo.index).isCurrentMutableComponentEmpty()
 +                            || ioCallback.hasPendingFlush() || opTracker.isFlushLogCreated()
 +                            || opTracker.isFlushOnExit())) {
 +                        long firstLSN = ioCallback.getFirstLSN();
 +                        if (firstLSN < targetLSN) {
 +                            opTracker.setFlushOnExit(true);
 +                            if (opTracker.getNumActiveOperations() == 0) {
 +                                // No Modify operations currently, we need to trigger the flush and we can do so safely
 +                                opTracker.flushIfRequested();
 +                            }
 +                            break;
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +    }
 +
 +    /*
 +     * This method can only be called asynchronously safely if we're sure no modify operation will take place until the flush is scheduled
 +     */
 +    private void flushDatasetOpenIndexes(DatasetInfo dsInfo, boolean asyncFlush) throws HyracksDataException {
-         if (!dsInfo.isExternal) {
++        if (!dsInfo.isExternal && dsInfo.durable) {
 +            synchronized (logRecord) {
 +                TransactionUtil.formFlushLogRecord(logRecord, dsInfo.datasetID, null, logManager.getNodeId(),
 +                        dsInfo.indexes.size());
 +                try {
 +                    logManager.log(logRecord);
 +                } catch (ACIDException e) {
 +                    throw new HyracksDataException("could not write flush log while closing dataset", e);
 +                }
 +
 +                try {
 +                    //notification will come from LogPage class (notifyFlushTerminator)
 +                    logRecord.wait();
 +                } catch (InterruptedException e) {
 +                    throw new HyracksDataException(e);
 +                }
 +            }
 +            for (IndexInfo iInfo : dsInfo.indexes.values()) {
 +                //update resource lsn
 +                AbstractLSMIOOperationCallback ioOpCallback = (AbstractLSMIOOperationCallback) iInfo.index
 +                        .getIOOperationCallback();
 +                ioOpCallback.updateLastLSN(logRecord.getLSN());
 +            }
 +        }
 +
 +        if (asyncFlush) {
 +            for (IndexInfo iInfo : dsInfo.indexes.values()) {
 +                ILSMIndexAccessor accessor = iInfo.index.createAccessor(NoOpOperationCallback.INSTANCE,
 +                        NoOpOperationCallback.INSTANCE);
 +                accessor.scheduleFlush(iInfo.index.getIOOperationCallback());
 +            }
 +        } else {
 +            for (IndexInfo iInfo : dsInfo.indexes.values()) {
 +                // TODO: This is not efficient since we flush the indexes sequentially.
 +                // Think of a way to allow submitting the flush requests concurrently. We don't do them concurrently because this
 +                // may lead to a deadlock scenario between the DatasetLifeCycleManager and the PrimaryIndexOperationTracker.
 +                flushAndWaitForIO(dsInfo, iInfo);
 +            }
 +        }
 +    }
 +
 +    private void closeDataset(DatasetInfo dsInfo) throws HyracksDataException {
 +        // First wait for any ongoing IO operations
 +        synchronized (dsInfo) {
 +            while (dsInfo.numActiveIOOps > 0) {
 +                try {
 +                    dsInfo.wait();
 +                } catch (InterruptedException e) {
 +                    throw new HyracksDataException(e);
 +                }
 +            }
 +        }
 +        try {
 +            flushDatasetOpenIndexes(dsInfo, false);
 +        } catch (Exception e) {
 +            throw new HyracksDataException(e);
 +        }
 +        for (IndexInfo iInfo : dsInfo.indexes.values()) {
 +            if (iInfo.isOpen) {
 +                ILSMOperationTracker opTracker = iInfo.index.getOperationTracker();
 +                synchronized (opTracker) {
 +                    iInfo.index.deactivate(false);
 +                }
 +                iInfo.isOpen = false;
 +            }
 +            assert iInfo.referenceCount == 0;
 +        }
 +        removeDatasetFromCache(dsInfo.datasetID);
 +        dsInfo.isOpen = false;
 +    }
 +
 +    @Override
 +    public synchronized void closeAllDatasets() throws HyracksDataException {
 +        List<DatasetInfo> openDatasets = new ArrayList<>(datasetInfos.values());
 +        for (DatasetInfo dsInfo : openDatasets) {
 +            closeDataset(dsInfo);
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void closeUserDatasets() throws HyracksDataException {
 +        List<DatasetInfo> openDatasets = new ArrayList<>(datasetInfos.values());
 +        for (DatasetInfo dsInfo : openDatasets) {
 +            if (dsInfo.datasetID >= firstAvilableUserDatasetID) {
 +                closeDataset(dsInfo);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void stop(boolean dumpState, OutputStream outputStream) throws IOException {
 +        if (dumpState) {
 +            dumpState(outputStream);
 +        }
 +
 +        closeAllDatasets();
 +
 +        datasetVirtualBufferCachesMap.clear();
 +        datasetOpTrackers.clear();
 +        datasetInfos.clear();
 +    }
 +
 +    @Override
 +    public void dumpState(OutputStream outputStream) throws IOException {
 +        StringBuilder sb = new StringBuilder();
 +
 +        sb.append(String.format("Memory budget = %d\n", capacity));
 +        sb.append(String.format("Memory used = %d\n", used));
 +        sb.append("\n");
 +
 +        String dsHeaderFormat = "%-10s %-6s %-16s %-12s\n";
 +        String dsFormat = "%-10d %-6b %-16d %-12d\n";
 +        String idxHeaderFormat = "%-10s %-11s %-6s %-16s %-6s\n";
 +        String idxFormat = "%-10d %-11d %-6b %-16d %-6s\n";
 +
 +        sb.append("[Datasets]\n");
 +        sb.append(String.format(dsHeaderFormat, "DatasetID", "Open", "Reference Count", "Last Access"));
 +        for (DatasetInfo dsInfo : datasetInfos.values()) {
 +            sb.append(
 +                    String.format(dsFormat, dsInfo.datasetID, dsInfo.isOpen, dsInfo.referenceCount, dsInfo.lastAccess));
 +        }
 +        sb.append("\n");
 +
 +        sb.append("[Indexes]\n");
 +        sb.append(String.format(idxHeaderFormat, "DatasetID", "ResourceID", "Open", "Reference Count", "Index"));
 +        for (DatasetInfo dsInfo : datasetInfos.values()) {
 +            for (Map.Entry<Long, IndexInfo> entry : dsInfo.indexes.entrySet()) {
 +                IndexInfo iInfo = entry.getValue();
 +                sb.append(String.format(idxFormat, dsInfo.datasetID, entry.getKey(), iInfo.isOpen, iInfo.referenceCount,
 +                        iInfo.index));
 +            }
 +        }
 +
 +        outputStream.write(sb.toString().getBytes());
 +    }
 +
 +    private synchronized void allocateDatasetMemory(int datasetId) throws HyracksDataException {
 +        DatasetInfo dsInfo = datasetInfos.get(datasetId);
 +        if (dsInfo == null) {
 +            throw new HyracksDataException(
 +                    "Failed to allocate memory for dataset with ID " + datasetId + " since it is not open.");
 +        }
 +        synchronized (dsInfo) {
 +            // This is not needed for external datasets' indexes since they never use the virtual buffer cache.
 +            if (!dsInfo.memoryAllocated && !dsInfo.isExternal) {
 +                long additionalSize = getVirtualBufferCaches(dsInfo.datasetID).getTotalSize();
 +                while (used + additionalSize > capacity) {
 +                    if (!evictCandidateDataset()) {
 +                        throw new HyracksDataException("Cannot allocate dataset " + dsInfo.datasetID
 +                                + " memory since memory budget would be exceeded.");
 +                    }
 +                }
 +                used += additionalSize;
 +                dsInfo.memoryAllocated = true;
 +            }
 +        }
 +    }
 +
 +    private synchronized void deallocateDatasetMemory(int datasetId) throws HyracksDataException {
 +        DatasetInfo dsInfo = datasetInfos.get(datasetId);
 +        if (dsInfo == null) {
 +            throw new HyracksDataException(
 +                    "Failed to deallocate memory for dataset with ID " + datasetId + " since it is not open.");
 +        }
 +        synchronized (dsInfo) {
 +            if (dsInfo.isOpen && dsInfo.memoryAllocated) {
 +                used -= getVirtualBufferCaches(dsInfo.datasetID).getTotalSize();
 +                dsInfo.memoryAllocated = false;
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void allocateMemory(String resourcePath) throws HyracksDataException {
 +        //a resource name in the case of DatasetLifecycleManager is a dataset id which is passed to the ResourceHeapBufferAllocator.
 +        int did = Integer.parseInt(resourcePath);
 +        allocateDatasetMemory(did);
 +    }
 +
 +    private class DatasetVirtualBufferCaches {
 +        private final int datasetID;
 +        private final Map<Integer, List<IVirtualBufferCache>> ioDeviceVirtualBufferCaches = new HashMap<>();
 +
 +        public DatasetVirtualBufferCaches(int datasetID) {
 +            this.datasetID = datasetID;
 +        }
 +
 +        private List<IVirtualBufferCache> initializeVirtualBufferCaches(int ioDeviceNum) {
 +            assert ioDeviceVirtualBufferCaches.size() < numPartitions;
 +            int numPages = datasetID < firstAvilableUserDatasetID
 +                    ? storageProperties.getMetadataMemoryComponentNumPages()
 +                    : storageProperties.getMemoryComponentNumPages();
 +            List<IVirtualBufferCache> vbcs = new ArrayList<>();
 +            for (int i = 0; i < storageProperties.getMemoryComponentsNum(); i++) {
 +                MultitenantVirtualBufferCache vbc = new MultitenantVirtualBufferCache(
-                         new VirtualBufferCache(new ResourceHeapBufferAllocator(DatasetLifecycleManager.this,
-                                 Integer.toString(datasetID)), storageProperties.getMemoryComponentPageSize(),
++                        new VirtualBufferCache(
++                                new ResourceHeapBufferAllocator(DatasetLifecycleManager.this,
++                                        Integer.toString(datasetID)),
++                                storageProperties.getMemoryComponentPageSize(),
 +                                numPages / storageProperties.getMemoryComponentsNum() / numPartitions));
 +                vbcs.add(vbc);
 +            }
 +            ioDeviceVirtualBufferCaches.put(ioDeviceNum, vbcs);
 +            return vbcs;
 +        }
 +
 +        public List<IVirtualBufferCache> getVirtualBufferCaches(int ioDeviceNum) {
 +            synchronized (ioDeviceVirtualBufferCaches) {
 +                List<IVirtualBufferCache> vbcs = ioDeviceVirtualBufferCaches.get(ioDeviceNum);
 +                if (vbcs == null) {
 +                    vbcs = initializeVirtualBufferCaches(ioDeviceNum);
 +                }
 +                return vbcs;
 +            }
 +        }
 +
 +        public long getTotalSize() {
 +            int numPages = datasetID < firstAvilableUserDatasetID
 +                    ? storageProperties.getMetadataMemoryComponentNumPages()
 +                    : storageProperties.getMemoryComponentNumPages();
 +
 +            return storageProperties.getMemoryComponentPageSize() * numPages;
 +        }
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
index e5a3473,0000000..b3eb281
mode 100644,000000..100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/PrimaryIndexOperationTracker.java
@@@ -1,201 -1,0 +1,209 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.asterix.common.context;
 +
 +import java.util.Set;
 +import java.util.concurrent.atomic.AtomicInteger;
 +
 +import org.apache.asterix.common.context.DatasetLifecycleManager.DatasetInfo;
 +import org.apache.asterix.common.exceptions.ACIDException;
 +import org.apache.asterix.common.ioopcallbacks.AbstractLSMIOOperationCallback;
 +import org.apache.asterix.common.transactions.AbstractOperationCallback;
 +import org.apache.asterix.common.transactions.ILogManager;
 +import org.apache.asterix.common.transactions.LogRecord;
 +import org.apache.asterix.common.utils.TransactionUtil;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndex;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessor;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexInternal;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
 +import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
 +import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndex;
 +
 +public class PrimaryIndexOperationTracker extends BaseOperationTracker {
 +
 +    // Number of active operations on an ILSMIndex instance.
 +    private final AtomicInteger numActiveOperations;
 +    private final ILogManager logManager;
 +    private boolean flushOnExit = false;
 +    private boolean flushLogCreated = false;
 +
 +    public PrimaryIndexOperationTracker(int datasetID, ILogManager logManager, DatasetInfo dsInfo) {
 +        super(datasetID, dsInfo);
 +        this.logManager = logManager;
 +        this.numActiveOperations = new AtomicInteger();
 +    }
 +
 +    @Override
 +    public void beforeOperation(ILSMIndex index, LSMOperationType opType, ISearchOperationCallback searchCallback,
 +            IModificationOperationCallback modificationCallback) throws HyracksDataException {
 +        if (opType == LSMOperationType.MODIFICATION || opType == LSMOperationType.FORCE_MODIFICATION) {
 +            incrementNumActiveOperations(modificationCallback);
 +        } else if (opType == LSMOperationType.FLUSH || opType == LSMOperationType.MERGE
 +                || opType == LSMOperationType.REPLICATE) {
 +            dsInfo.declareActiveIOOperation();
 +        }
 +    }
 +
 +    @Override
 +    public void afterOperation(ILSMIndex index, LSMOperationType opType, ISearchOperationCallback searchCallback,
 +            IModificationOperationCallback modificationCallback) throws HyracksDataException {
 +        // Searches are immediately considered complete, because they should not prevent the execution of flushes.
 +        if (opType == LSMOperationType.FLUSH || opType == LSMOperationType.MERGE
 +                || opType == LSMOperationType.REPLICATE) {
 +            completeOperation(index, opType, searchCallback, modificationCallback);
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void completeOperation(ILSMIndex index, LSMOperationType opType,
 +            ISearchOperationCallback searchCallback, IModificationOperationCallback modificationCallback)
 +                    throws HyracksDataException {
 +        if (opType == LSMOperationType.MODIFICATION || opType == LSMOperationType.FORCE_MODIFICATION) {
 +            decrementNumActiveOperations(modificationCallback);
 +            if (numActiveOperations.get() == 0) {
 +                flushIfRequested();
 +            } else if (numActiveOperations.get() < 0) {
 +                throw new HyracksDataException("The number of active operations cannot be negative!");
 +            }
 +        } else if (opType == LSMOperationType.FLUSH || opType == LSMOperationType.MERGE
 +                || opType == LSMOperationType.REPLICATE) {
 +            dsInfo.undeclareActiveIOOperation();
 +        }
 +    }
 +
 +    public void flushIfRequested() throws HyracksDataException {
 +        // If we need a flush, and this is the last completing operation, then schedule the flush,
 +        // or if there is a flush scheduled by the checkpoint (flushOnExit), then schedule it
 +
 +        boolean needsFlush = false;
 +        Set<ILSMIndex> indexes = dsInfo.getDatasetIndexes();
 +
 +        if (!flushOnExit) {
 +            for (ILSMIndex lsmIndex : indexes) {
 +                ILSMIndexInternal lsmIndexInternal = (ILSMIndexInternal) lsmIndex;
 +                if (lsmIndexInternal.hasFlushRequestForCurrentMutableComponent()) {
 +                    needsFlush = true;
 +                    break;
 +                }
 +            }
 +        }
 +
 +        if (needsFlush || flushOnExit) {
 +            //Make the current mutable components READABLE_UNWRITABLE to stop coming modify operations from entering them until the current flush is scheduled.
 +            for (ILSMIndex lsmIndex : indexes) {
 +                AbstractLSMIndex abstractLSMIndex = ((AbstractLSMIndex) lsmIndex);
 +                ILSMOperationTracker opTracker = abstractLSMIndex.getOperationTracker();
 +                synchronized (opTracker) {
 +                    if (abstractLSMIndex.getCurrentMutableComponentState() == ComponentState.READABLE_WRITABLE) {
 +                        abstractLSMIndex.setCurrentMutableComponentState(ComponentState.READABLE_UNWRITABLE);
 +                    }
 +                }
 +            }
 +            LogRecord logRecord = new LogRecord();
-             TransactionUtil.formFlushLogRecord(logRecord, datasetID, this, logManager.getNodeId(),
-                     dsInfo.getDatasetIndexes().size());
-             try {
-                 logManager.log(logRecord);
-             } catch (ACIDException e) {
-                 throw new HyracksDataException("could not write flush log", e);
-             }
- 
-             flushLogCreated = true;
 +            flushOnExit = false;
++            if (dsInfo.isDurable()) {
++                /**
++                 * Generate a FLUSH log.
++                 * Flush will be triggered when the log is written to disk by LogFlusher.
++                 */
++                TransactionUtil.formFlushLogRecord(logRecord, datasetID, this, logManager.getNodeId(),
++                        dsInfo.getDatasetIndexes().size());
++                try {
++                    logManager.log(logRecord);
++                } catch (ACIDException e) {
++                    throw new HyracksDataException("could not write flush log", e);
++                }
++                flushLogCreated = true;
++            } else {
++                //trigger flush for temporary indexes without generating a FLUSH log.
++                triggerScheduleFlush(logRecord);
++            }
 +        }
 +    }
 +
 +    //This method is called sequentially by LogPage.notifyFlushTerminator in the sequence flushes were scheduled.
 +    public synchronized void triggerScheduleFlush(LogRecord logRecord) throws HyracksDataException {
 +        for (ILSMIndex lsmIndex : dsInfo.getDatasetIndexes()) {
 +
 +            //get resource
 +            ILSMIndexAccessor accessor = lsmIndex.createAccessor(NoOpOperationCallback.INSTANCE,
 +                    NoOpOperationCallback.INSTANCE);
 +
 +            //update resource lsn
 +            AbstractLSMIOOperationCallback ioOpCallback = (AbstractLSMIOOperationCallback) lsmIndex
 +                    .getIOOperationCallback();
 +            ioOpCallback.updateLastLSN(logRecord.getLSN());
 +
 +            //schedule flush after update
 +            accessor.scheduleFlush(lsmIndex.getIOOperationCallback());
 +        }
 +
 +        flushLogCreated = false;
 +    }
 +
 +    @Override
 +    public void exclusiveJobCommitted() throws HyracksDataException {
 +        numActiveOperations.set(0);
 +        flushIfRequested();
 +    }
 +
 +    public int getNumActiveOperations() {
 +        return numActiveOperations.get();
 +    }
 +
 +    private void incrementNumActiveOperations(IModificationOperationCallback modificationCallback) {
 +        //modificationCallback can be NoOpOperationCallback when redo/undo operations are executed.
 +        if (modificationCallback != NoOpOperationCallback.INSTANCE) {
 +            numActiveOperations.incrementAndGet();
 +            ((AbstractOperationCallback) modificationCallback).incrementLocalNumActiveOperations();
 +        }
 +    }
 +
 +    private void decrementNumActiveOperations(IModificationOperationCallback modificationCallback) {
 +        //modificationCallback can be NoOpOperationCallback when redo/undo operations are executed.
 +        if (modificationCallback != NoOpOperationCallback.INSTANCE) {
 +            numActiveOperations.decrementAndGet();
 +            ((AbstractOperationCallback) modificationCallback).decrementLocalNumActiveOperations();
 +        }
 +    }
 +
 +    public void cleanupNumActiveOperationsForAbortedJob(int numberOfActiveOperations) {
 +        numberOfActiveOperations *= -1;
 +        numActiveOperations.getAndAdd(numberOfActiveOperations);
 +    }
 +
 +    public boolean isFlushOnExit() {
 +        return flushOnExit;
 +    }
 +
 +    public void setFlushOnExit(boolean flushOnExit) {
 +        this.flushOnExit = flushOnExit;
 +    }
 +
 +    public boolean isFlushLogCreated() {
 +        return flushLogCreated;
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
index 5b4035c,0000000..78b06fb
mode 100644,000000..100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
@@@ -1,73 -1,0 +1,73 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.common.utils;
 +
 +import java.io.File;
 +
 +import org.apache.asterix.common.cluster.ClusterPartition;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +import org.apache.hyracks.api.io.FileReference;
 +import org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 +
 +public class StoragePathUtil {
 +    public static final String PARTITION_DIR_PREFIX = "partition_";
 +    public static final String TEMP_DATASETS_STORAGE_FOLDER = "temp";
 +    public static final String DATASET_INDEX_NAME_SEPARATOR = "_idx_";
 +    public static final String ADAPTER_INSTANCE_PREFIX = "adapter_";
 +
 +    public static Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraints(
 +            FileSplit[] splits) {
 +        IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
 +        String[] loc = new String[splits.length];
 +        for (int p = 0; p < splits.length; p++) {
 +            loc[p] = splits[p].getNodeName();
 +        }
 +        AlgebricksPartitionConstraint pc = new AlgebricksAbsolutePartitionConstraint(loc);
 +        return new Pair<IFileSplitProvider, AlgebricksPartitionConstraint>(splitProvider, pc);
 +    }
 +
 +    public static FileSplit getFileSplitForClusterPartition(ClusterPartition partition, File relativeFile) {
 +        return new FileSplit(partition.getActiveNodeId(), new FileReference(relativeFile), partition.getIODeviceNum(),
 +                partition.getPartitionId());
 +    }
 +
 +    public static String prepareStoragePartitionPath(String storageDirName, int partitonId) {
 +        return storageDirName + File.separator + StoragePathUtil.PARTITION_DIR_PREFIX + partitonId;
 +    }
 +
 +    public static String prepareDataverseIndexName(String dataverseName, String datasetName, String idxName) {
 +        return prepareDataverseIndexName(dataverseName, prepareFullIndexName(datasetName, idxName));
 +    }
 +
 +    public static String prepareDataverseIndexName(String dataverseName, String fullIndexName) {
 +        return dataverseName + File.separator + fullIndexName;
 +    }
 +
 +    private static String prepareFullIndexName(String datasetName, String idxName) {
 +        return (datasetName + DATASET_INDEX_NAME_SEPARATOR + idxName);
 +    }
 +
-     public static int getPartitonNumFromName(String name) {
++    public static int getPartitionNumFromName(String name) {
 +        return Integer.parseInt(name.substring(PARTITION_DIR_PREFIX.length()));
 +    }
 +}


[02/50] [abbrv] incubator-asterixdb git commit: Edit TLP pom

Posted by im...@apache.org.
Edit TLP pom


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/e1c203e4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/e1c203e4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/e1c203e4

Branch: refs/heads/master
Commit: e1c203e4d7067217d20e31d8b3b8be1810a03ce7
Parents: bc0607d
Author: Ian Maxon <im...@apache.org>
Authored: Wed Mar 30 17:33:53 2016 -0700
Committer: Ian Maxon <im...@apache.org>
Committed: Wed Mar 30 17:33:53 2016 -0700

----------------------------------------------------------------------
 pom.xml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e1c203e4/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index b3ff069..b5676ba 100644
--- a/pom.xml
+++ b/pom.xml
@@ -19,8 +19,8 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache</groupId>
-  <artifactId>experimental</artifactId>
-  <version>0.0.1</version>
+  <artifactId>apache-asterixdb-fullstack</artifactId>
+  <version>0.8.9</version>
   <packaging>pom</packaging>
   <name>hyracks-asterix</name>
 
@@ -34,7 +34,7 @@
   </licenses>
 
   <modules>
-    <module>hyracks</module>
+    <module>hyracks-fullstack</module>
     <module>asterixdb</module>
   </modules>
 </project>


[06/50] [abbrv] incubator-asterixdb git commit: Add Compatibility for FileRemoveOperatorDescriptor

Posted by im...@apache.org.
Add Compatibility for FileRemoveOperatorDescriptor

Change-Id: Ic08ba645db6936df3c4c59be0e104dbb18088370
Reviewed-on: https://asterix-gerrit.ics.uci.edu/784
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: Till Westmann <ti...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/3f849969
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/3f849969
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/3f849969

Branch: refs/heads/master
Commit: 3f849969f01effc9b6e7f22462ceb4b2bedabdc4
Parents: c820f2c
Author: Abdullah Alamoudi <ba...@gmail.com>
Authored: Mon Apr 4 23:59:55 2016 +0300
Committer: abdullah alamoudi <ba...@gmail.com>
Committed: Tue Apr 5 05:52:16 2016 -0700

----------------------------------------------------------------------
 .../dataflow/std/file/FileRemoveOperatorDescriptor.java     | 9 +++++++++
 1 file changed, 9 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/3f849969/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
index 43b1b34..c3883e8 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
@@ -46,6 +46,15 @@ public class FileRemoveOperatorDescriptor extends AbstractSingleActivityOperator
         this.quietly = quietly;
     }
 
+    /**
+     *
+     * @deprecated use {@link #FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder, boolean quietly)} instead.
+     */
+    @Deprecated
+    public FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder) {
+        this(spec, fileSplitProvder, false);
+    }
+
     private static final long serialVersionUID = 1L;
 
     @Override


[29/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64_null/avg_int64_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64_null/avg_int64_null.3.query.sqlpp
index b320332,0000000..8c063d3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64_null/avg_int64_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64_null/avg_int64_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test.avg((
++{'average':test.coll_avg((
 +    select element x.int64Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8/avg_int8.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8/avg_int8.3.query.sqlpp
index f2b9ac6,0000000..91ee6be
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8/avg_int8.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8/avg_int8.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.avg((
++select element test.coll_avg((
 +    select element x
 +    from  [test.int8('1'),test.int8('2'),test.int8('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8_null/avg_int8_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8_null/avg_int8_null.3.query.sqlpp
index 1753b77,0000000..74fb4c4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8_null/avg_int8_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int8_null/avg_int8_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test.avg((
++{'average':test.coll_avg((
 +    select element x.int8Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp
index 988020f,0000000..4043084
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_mixed/avg_mixed.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run avg over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Jun 2nd 2013
 +*/
 +
- select element avg((
++select element coll_avg((
 +    select element x
 +    from  [float('2.0'),'hello world',93847382783847382,date('2013-01-01')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_01/count_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_01/count_01.3.query.sqlpp
index 5c81a50,0000000..fa90f85
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_01/count_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_01/count_01.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.count((
++select element test.coll_count((
 +    select element x
 +    from  [1,2,3] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_01/count_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_01/count_empty_01.3.query.sqlpp
index 4be4da7,0000000..383671d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_01/count_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_01/count_empty_01.3.query.sqlpp
@@@ -1,29 -1,0 +1,29 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that count aggregation correctly returns 0 for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
- select element count((
++select element coll_count((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_02/count_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_02/count_empty_02.3.query.sqlpp
index c36e99a,0000000..67e6e77
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_02/count_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_empty_02/count_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that count aggregation correctly returns 0 for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.count((
++select element test.coll_count((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_null/count_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_null/count_null.3.query.sqlpp
index 5d60f10,0000000..ec84f7c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_null/count_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/count_null/count_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'count':test.count((
++{'count':test.coll_count((
 +    select element x.doubleField
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue395/issue395.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue395/issue395.3.query.sqlpp
index 81aa828,0000000..feea992
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue395/issue395.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue395/issue395.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.count((
++select element test.coll_count((
 +    select element l.name
 +    from  Employee as l
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_0/issue412_0.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_0/issue412_0.3.query.sqlpp
index 5dcf622,0000000..deaa68a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_0/issue412_0.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_0/issue412_0.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- count(['ASTERIX','Hyracks',null]);
++coll_count(['ASTERIX','Hyracks',null]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_1/issue412_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_1/issue412_1.3.query.sqlpp
index 78c9164,0000000..9da7811
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_1/issue412_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue412_1/issue412_1.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- {'count':count([1,60,null]),'average':avg([1,60,null]),'sum':sum([1,60,null]),'min':min([1,60,null]),'max':max([1,60,null])};
++{'count':coll_count([1,60,null]),'average':coll_avg([1,60,null]),'sum':coll_sum([1,60,null]),'min':coll_min([1,60,null]),'max':coll_max([1,60,null])};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
index 40631ed,0000000..b2c6c66
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- min([23,748374857483]);
++coll_min([23,748374857483]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
index 8d438e3,0000000..e12e644
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- min([748374857483,23,0.5]);
++coll_min([748374857483,23,0.5]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
index 17fd1aa,0000000..770b7dc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- sum([23,748374857483]);
++coll_sum([23,748374857483]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
index 41429b1,0000000..3932e23
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- sum([748374857483,23,0.5]);
++coll_sum([748374857483,23,0.5]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
index b5b9917,0000000..2a3312e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
 + * issue531_string_min_max
 + *
 + * Purpose: test the support of string values for min and max aggregation function
 + * Result: success
 + *
 + */
 +
 +use test;
 +
 +
- select element {'min':test.min((
++select element {'min':test.coll_min((
 +        select element l.name
 +        from  t1 as l
-     )),'max':test.max((
++    )),'max':test.coll_max((
 +        select element l.name
 +        from  t1 as l
 +    ))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_01/max_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_01/max_empty_01.3.query.sqlpp
index 8d565eb,0000000..fa419e6
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_01/max_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_01/max_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that max aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.max((
++select element test.coll_max((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_02/max_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_02/max_empty_02.3.query.sqlpp
index 442f496,0000000..1ad7c2b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_02/max_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/max_empty_02/max_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that max aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.max((
++select element test.coll_max((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_01/min_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_01/min_empty_01.3.query.sqlpp
index d476fcf,0000000..aa84453
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_01/min_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_01/min_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that min aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.min((
++select element test.coll_min((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_02/min_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_02/min_empty_02.3.query.sqlpp
index ad14ddd,0000000..bd686b6
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_02/min_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_empty_02/min_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that min aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.min((
++select element test.coll_min((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_mixed/min_mixed.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_mixed/min_mixed.3.query.sqlpp
index 0b5dced,0000000..1802fee
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_mixed/min_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/min_mixed/min_mixed.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run min over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Jun 2nd 2013
 +*/
 +
- select element min((
++select element coll_min((
 +    select element x
 +    from  [float('2.0'),'hello world',93847382783847382,date('2013-01-01')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/query-issue400/query-issue400.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/query-issue400/query-issue400.3.query.sqlpp
index d73c8ce,0000000..f910f12
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/query-issue400/query-issue400.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/query-issue400/query-issue400.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue400
 + *              : https://code.google.com/p/asterixdb/issues/detail?id=400
 + * Expected Res : Success
 + * Date         : 8th May 2013
 + */
 +
- count((select element i
++coll_count((select element i
 +from  [[1,2,3,4,5],[6,7,8,9]] as i
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg/scalar_avg.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg/scalar_avg.3.query.sqlpp
index 8312274,0000000..e61b9df
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg/scalar_avg.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg/scalar_avg.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of avg without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.avg([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test.avg([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test.avg([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test.avg([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test.avg([test.float('1'),test.float('2'),test.float('3')]),
-       d as test.avg([test.double('1'),test.double('2'),test.double('3')])
++with  i8 as test.coll_avg([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test.coll_avg([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test.coll_avg([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test.coll_avg([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test.coll_avg([test.float('1'),test.float('2'),test.float('3')]),
++      d as test.coll_avg([test.double('1'),test.double('2'),test.double('3')])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
index 39344f6,0000000..d2655e4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of avg with an empty list.
 + * Success        : Yes
 + */
 +
- select element avg([]);
++select element coll_avg([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_null/scalar_avg_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_null/scalar_avg_null.3.query.sqlpp
index 2e0293f,0000000..20f6a18
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_null/scalar_avg_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_avg_null/scalar_avg_null.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of avg with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.avg([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test.avg([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test.avg([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test.avg([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test.avg([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test.avg([test.double('1'),test.double('2'),test.double('3'),null])
++with  i8 as test.coll_avg([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test.coll_avg([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test.coll_avg([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test.coll_avg([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test.coll_avg([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test.coll_avg([test.double('1'),test.double('2'),test.double('3'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count/scalar_count.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count/scalar_count.3.query.sqlpp
index bcaad9e,0000000..8576a74
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count/scalar_count.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count/scalar_count.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of count without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.count([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test.count([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test.count([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test.count([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test.count([test.float('1'),test.float('2'),test.float('3')]),
-       d as test.count([test.double('1'),test.double('2'),test.double('3')]),
-       s as test.count(['a','b','c'])
++with  i8 as test.coll_count([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test.coll_count([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test.coll_count([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test.coll_count([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test.coll_count([test.float('1'),test.float('2'),test.float('3')]),
++      d as test.coll_count([test.double('1'),test.double('2'),test.double('3')]),
++      s as test.coll_count(['a','b','c'])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_empty/scalar_count_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_empty/scalar_count_empty.3.query.sqlpp
index ce9798d,0000000..d2fff80
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_empty/scalar_count_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_empty/scalar_count_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of count with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.count([]);
++select element test.coll_count([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_null/scalar_count_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_null/scalar_count_null.3.query.sqlpp
index 8cb1e66,0000000..14eeb83
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_null/scalar_count_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_count_null/scalar_count_null.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of count with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.count([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test.count([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test.count([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test.count([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test.count([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test.count([test.double('1'),test.double('2'),test.double('3'),null]),
-       s as test.count(['a','b','c',null])
++with  i8 as test.coll_count([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test.coll_count([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test.coll_count([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test.coll_count([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test.coll_count([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test.coll_count([test.double('1'),test.double('2'),test.double('3'),null]),
++      s as test.coll_count(['a','b','c',null])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max/scalar_max.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max/scalar_max.3.query.sqlpp
index 0070b55,0000000..da37e7c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max/scalar_max.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max/scalar_max.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of max without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.max([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test.max([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test.max([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test.max([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test.max([test.float('1'),test.float('2'),test.float('3')]),
-       d as test.max([test.double('1'),test.double('2'),test.double('3')]),
-       s as test.max(['foo','bar','world']),
-       dt as test.max([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
++with  i8 as test.coll_max([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test.coll_max([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test.coll_max([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test.coll_max([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test.coll_max([test.float('1'),test.float('2'),test.float('3')]),
++      d as test.coll_max([test.double('1'),test.double('2'),test.double('3')]),
++      s as test.coll_max(['foo','bar','world']),
++      dt as test.coll_max([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_empty/scalar_max_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_empty/scalar_max_empty.3.query.sqlpp
index 3bbf7e5,0000000..7978919
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_empty/scalar_max_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_empty/scalar_max_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of max with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.max([]);
++select element test.coll_max([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_null/scalar_max_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_null/scalar_max_null.3.query.sqlpp
index bccf473,0000000..036a5bb
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_null/scalar_max_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_max_null/scalar_max_null.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of max with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.max([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test.max([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test.max([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test.max([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test.max([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test.max([test.double('1'),test.double('2'),test.double('3'),null]),
-       s as test.max(['foo','bar','world',null]),
-       dt as test.max([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
++with  i8 as test.coll_max([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test.coll_max([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test.coll_max([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test.coll_max([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test.coll_max([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test.coll_max([test.double('1'),test.double('2'),test.double('3'),null]),
++      s as test.coll_max(['foo','bar','world',null]),
++      dt as test.coll_max([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min/scalar_min.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min/scalar_min.3.query.sqlpp
index 89e07a1,0000000..783c168
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min/scalar_min.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min/scalar_min.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of min without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.min([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test.min([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test.min([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test.min([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test.min([test.float('1'),test.float('2'),test.float('3')]),
-       d as test.min([test.double('1'),test.double('2'),test.double('3')]),
-       s as test.min(['foo','bar','world']),
-       dt as test.min([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
++with  i8 as test.coll_min([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test.coll_min([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test.coll_min([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test.coll_min([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test.coll_min([test.float('1'),test.float('2'),test.float('3')]),
++      d as test.coll_min([test.double('1'),test.double('2'),test.double('3')]),
++      s as test.coll_min(['foo','bar','world']),
++      dt as test.coll_min([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_empty/scalar_min_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_empty/scalar_min_empty.3.query.sqlpp
index 2d1db86,0000000..8a9f7ec
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_empty/scalar_min_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_empty/scalar_min_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of min with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.min([]);
++select element test.coll_min([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_null/scalar_min_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_null/scalar_min_null.3.query.sqlpp
index 71d04ee,0000000..4005624
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_null/scalar_min_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_min_null/scalar_min_null.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of min with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.min([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test.min([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test.min([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test.min([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test.min([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test.min([test.double('1'),test.double('2'),test.double('3'),null]),
-       s as test.min(['foo','bar','world',null]),
-       dt as test.min([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
++with  i8 as test.coll_min([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test.coll_min([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test.coll_min([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test.coll_min([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test.coll_min([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test.coll_min([test.double('1'),test.double('2'),test.double('3'),null]),
++      s as test.coll_min(['foo','bar','world',null]),
++      dt as test.coll_min([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum/scalar_sum.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum/scalar_sum.3.query.sqlpp
index 08a1049,0000000..eb9fa28
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum/scalar_sum.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum/scalar_sum.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of sum without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.sum([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test.sum([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test.sum([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test.sum([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test.sum([test.float('1'),test.float('2'),test.float('3')]),
-       d as test.sum([test.double('1'),test.double('2'),test.double('3')])
++with  i8 as test.coll_sum([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test.coll_sum([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test.coll_sum([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test.coll_sum([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test.coll_sum([test.float('1'),test.float('2'),test.float('3')]),
++      d as test.coll_sum([test.double('1'),test.double('2'),test.double('3')])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
index 8bd634e,0000000..a0d5516
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of sum with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.sum([]);
++select element test.coll_sum([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_null/scalar_sum_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_null/scalar_sum_null.3.query.sqlpp
index 04ecae3,0000000..fcb058c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_null/scalar_sum_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/scalar_sum_null/scalar_sum_null.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests the scalar version of sum with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test.sum([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test.sum([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test.sum([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test.sum([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test.sum([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test.sum([test.double('1'),test.double('2'),test.double('3'),null])
++with  i8 as test.coll_sum([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test.coll_sum([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test.coll_sum([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test.coll_sum([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test.coll_sum([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test.coll_sum([test.double('1'),test.double('2'),test.double('3'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double/sum_double.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double/sum_double.3.query.sqlpp
index bd7092e,0000000..e8a1f4d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double/sum_double.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double/sum_double.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x
 +    from  [1.0,2.0,3.0] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double_null/sum_double_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double_null/sum_double_null.3.query.sqlpp
index ed194cf,0000000..7a17bab
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double_null/sum_double_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_double_null/sum_double_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x.doubleField
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_01/sum_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_01/sum_empty_01.3.query.sqlpp
index a3cab15,0000000..bfc0e2c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_01/sum_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_01/sum_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that sum aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_02/sum_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_02/sum_empty_02.3.query.sqlpp
index 181ff0d,0000000..ce6abf4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_02/sum_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_empty_02/sum_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that sum aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float/sum_float.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float/sum_float.3.query.sqlpp
index a9bceb8,0000000..8f90964
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float/sum_float.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float/sum_float.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x
 +    from  [test.float('1'),test.float('2'),test.float('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float_null/sum_float_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float_null/sum_float_null.3.query.sqlpp
index 30ef69c,0000000..f4c25bb
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float_null/sum_float_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_float_null/sum_float_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x.floatField
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16/sum_int16.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16/sum_int16.3.query.sqlpp
index 7f8b538,0000000..57381b6
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16/sum_int16.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16/sum_int16.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x
 +    from  [test.int16('1'),test.int16('2'),test.int16('3')] as x
 +));



[47/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
Move merged files


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/d630d1a2
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/d630d1a2
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/d630d1a2

Branch: refs/heads/master
Commit: d630d1a2480387a77512f189beca5c7042e196ef
Parents: 8516517
Author: Ian Maxon <im...@apache.org>
Authored: Wed Apr 6 19:57:08 2016 -0700
Committer: Ian Maxon <im...@apache.org>
Committed: Wed Apr 6 19:57:08 2016 -0700

----------------------------------------------------------------------
 .../classad-parser-new.1.ddl.aql                |  31 --
 .../classad-parser-new.2.lib.aql                |  19 -
 .../classad-parser-new.3.ddl.aql                |  27 --
 .../classad-parser-new.4.query.aql              |  23 -
 .../classad-parser-new.5.lib.aql                |  19 -
 .../classad-parser-old.1.ddl.aql                |  31 --
 .../classad-parser-old.2.lib.aql                |  19 -
 .../classad-parser-old.3.ddl.aql                |  25 -
 .../classad-parser-old.4.query.aql              |  23 -
 .../classad-parser-old.5.lib.aql                |  19 -
 .../invalid-format/invalid-format.1.ddl.aql     |  34 --
 .../invalid-format/invalid-format.2.query.aql   |  27 --
 .../feeds/twitter-feed/twitter-feed.1.ddl.aql   |  54 ---
 .../twitter-feed/twitter-feed.2.update.aql      |  25 -
 .../file-not-found/file-not-found.1.ddl.aql     |  29 --
 .../file-not-found/file-not-found.2.update.aql  |  30 --
 .../temp_primary_plus_ngram_flush.1.ddl.aql     |  67 ---
 .../temp_primary_plus_ngram_flush.2.update.aql  |  46 --
 .../temp_primary_plus_ngram_flush.3.query.aql   |  29 --
 .../global-aggregate/q01/q01.1.ddl.sqlpp        |  51 --
 .../global-aggregate/q01/q01.2.update.sqlpp     |  30 --
 .../global-aggregate/q01/q01.3.query.sqlpp      |  23 -
 .../global-aggregate/q02/q02.1.ddl.sqlpp        |  51 --
 .../global-aggregate/q02/q02.2.update.sqlpp     |  30 --
 .../global-aggregate/q02/q02.3.query.sqlpp      |  23 -
 .../global-aggregate/q03/q03.1.ddl.sqlpp        |  51 --
 .../global-aggregate/q03/q03.2.update.sqlpp     |  30 --
 .../global-aggregate/q03/q03.3.query.sqlpp      |  23 -
 .../global-aggregate/q04/q04.1.ddl.sqlpp        |  51 --
 .../global-aggregate/q04/q04.2.update.sqlpp     |  30 --
 .../global-aggregate/q04/q04.3.query.sqlpp      |  23 -
 .../q05_error/q05_error.1.ddl.sqlpp             |  51 --
 .../q05_error/q05_error.2.update.sqlpp          |  30 --
 .../q05_error/q05_error.3.query.sqlpp           |  23 -
 .../q06_error/q06_error.1.ddl.sqlpp             |  51 --
 .../q06_error/q06_error.2.update.sqlpp          |  30 --
 .../q06_error/q06_error.3.query.sqlpp           |  23 -
 .../q07_error/q07_error.1.ddl.sqlpp             |  51 --
 .../q07_error/q07_error.2.update.sqlpp          |  30 --
 .../q07_error/q07_error.3.query.sqlpp           |  26 -
 .../global-aggregate/q08/q08.1.ddl.sqlpp        |  51 --
 .../global-aggregate/q08/q08.2.update.sqlpp     |  30 --
 .../global-aggregate/q08/q08.3.query.sqlpp      |  24 -
 .../q06_forecast_revenue_change.4.query.sqlpp   |  27 --
 .../classad-parser-new/classad-parser-new.1.adm | 100 ----
 .../classad-parser-old/classad-parser-old.1.adm |   5 -
 .../results/global-aggregate/q01/q01.1.adm      |   1 -
 .../results/global-aggregate/q02/q02.1.adm      |   1 -
 .../results/global-aggregate/q08/q08.1.adm      |  10 -
 .../temp_primary_plus_ngram_flush.3.adm         |   1 -
 .../q06_forecast_revenue_change.2.adm           |   1 -
 .../q06_forecast_revenue_change.4.ast           |  73 ---
 .../api/IStreamNotificationHandler.java         |  28 --
 .../reader/IndexingStreamRecordReader.java      | 101 ----
 .../reader/stream/StreamRecordReader.java       |  98 ----
 .../stream/StreamRecordReaderFactory.java       |  72 ---
 .../provider/StreamRecordReaderProvider.java    |  81 ----
 .../resources/classad-with-temporals.classads   | 134 ------
 .../results/classad-with-temporals.adm          |   1 -
 .../dataset-with-meta-record.1.script.aql       |   1 -
 .../dataset-with-meta-record.2.ddl.aql          |  58 ---
 .../dataset-with-meta-record.3.update.aql       |  27 --
 .../dataset-with-meta-record.4.script.aql       |   1 -
 .../dataset-with-meta-record.5.query.aql        |  29 --
 .../dataset-with-meta-record.6.script.aql       |   1 -
 .../dataset-with-meta-record.5.adm              |   1 -
 .../create_and_start.sh                         |   1 -
 .../dataset-with-meta-record/stop_and_delete.sh |   3 -
 .../dataset-with-meta-record/stop_and_start.sh  |   2 -
 .../visitor/InlineColumnAliasVisitor.java       | 450 ------------------
 .../SqlppBuiltinFunctionRewriteVisitor.java     |  46 --
 .../SqlppGlobalAggregationSugarVisitor.java     |  67 ---
 .../visitor/SqlppGroupBySugarVisitor.java       | 123 -----
 .../rewrites/visitor/SqlppGroupByVisitor.java   | 168 -------
 .../visitor/SqlppInlineUdfsVisitor.java         | 237 ----------
 .../visitor/VariableCheckAndRewriteVisitor.java | 102 ----
 .../lang/sqlpp/util/FunctionMapUtil.java        | 150 ------
 .../visitor/CheckSql92AggregateVisitor.java     | 265 -----------
 .../lang/sqlpp/visitor/DeepCopyVisitor.java     | 415 ----------------
 .../lang/sqlpp/visitor/FreeVariableVisitor.java | 471 -------------------
 .../AbstractSqlppExpressionScopingVisitor.java  | 284 -----------
 .../AbstractSqlppSimpleExpressionVisitor.java   | 347 --------------
 .../classad-parser-new.1.ddl.aql                |  31 ++
 .../classad-parser-new.2.lib.aql                |  19 +
 .../classad-parser-new.3.ddl.aql                |  27 ++
 .../classad-parser-new.4.query.aql              |  23 +
 .../classad-parser-new.5.lib.aql                |  19 +
 .../classad-parser-old.1.ddl.aql                |  31 ++
 .../classad-parser-old.2.lib.aql                |  19 +
 .../classad-parser-old.3.ddl.aql                |  25 +
 .../classad-parser-old.4.query.aql              |  23 +
 .../classad-parser-old.5.lib.aql                |  19 +
 .../invalid-format/invalid-format.1.ddl.aql     |  34 ++
 .../invalid-format/invalid-format.2.query.aql   |  27 ++
 .../feeds/twitter-feed/twitter-feed.1.ddl.aql   |  54 +++
 .../twitter-feed/twitter-feed.2.update.aql      |  25 +
 .../file-not-found/file-not-found.1.ddl.aql     |  29 ++
 .../file-not-found/file-not-found.2.update.aql  |  30 ++
 .../temp_primary_plus_ngram_flush.1.ddl.aql     |  67 +++
 .../temp_primary_plus_ngram_flush.2.update.aql  |  46 ++
 .../temp_primary_plus_ngram_flush.3.query.aql   |  29 ++
 .../global-aggregate/q01/q01.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q01/q01.2.update.sqlpp     |  30 ++
 .../global-aggregate/q01/q01.3.query.sqlpp      |  23 +
 .../global-aggregate/q02/q02.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q02/q02.2.update.sqlpp     |  30 ++
 .../global-aggregate/q02/q02.3.query.sqlpp      |  23 +
 .../global-aggregate/q03/q03.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q03/q03.2.update.sqlpp     |  30 ++
 .../global-aggregate/q03/q03.3.query.sqlpp      |  23 +
 .../global-aggregate/q04/q04.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q04/q04.2.update.sqlpp     |  30 ++
 .../global-aggregate/q04/q04.3.query.sqlpp      |  23 +
 .../q05_error/q05_error.1.ddl.sqlpp             |  51 ++
 .../q05_error/q05_error.2.update.sqlpp          |  30 ++
 .../q05_error/q05_error.3.query.sqlpp           |  23 +
 .../q06_error/q06_error.1.ddl.sqlpp             |  51 ++
 .../q06_error/q06_error.2.update.sqlpp          |  30 ++
 .../q06_error/q06_error.3.query.sqlpp           |  23 +
 .../q07_error/q07_error.1.ddl.sqlpp             |  51 ++
 .../q07_error/q07_error.2.update.sqlpp          |  30 ++
 .../q07_error/q07_error.3.query.sqlpp           |  26 +
 .../global-aggregate/q08/q08.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q08/q08.2.update.sqlpp     |  30 ++
 .../global-aggregate/q08/q08.3.query.sqlpp      |  24 +
 .../q06_forecast_revenue_change.4.query.sqlpp   |  27 ++
 .../classad-parser-new/classad-parser-new.1.adm | 100 ++++
 .../classad-parser-old/classad-parser-old.1.adm |   5 +
 .../results/global-aggregate/q01/q01.1.adm      |   1 +
 .../results/global-aggregate/q02/q02.1.adm      |   1 +
 .../results/global-aggregate/q08/q08.1.adm      |  10 +
 .../temp_primary_plus_ngram_flush.3.adm         |   1 +
 .../q06_forecast_revenue_change.2.adm           |   1 +
 .../q06_forecast_revenue_change.4.ast           |  73 +++
 .../api/IStreamNotificationHandler.java         |  28 ++
 .../reader/IndexingStreamRecordReader.java      | 101 ++++
 .../reader/stream/StreamRecordReader.java       |  98 ++++
 .../stream/StreamRecordReaderFactory.java       |  72 +++
 .../provider/StreamRecordReaderProvider.java    |  81 ++++
 .../resources/classad-with-temporals.classads   | 134 ++++++
 .../results/classad-with-temporals.adm          |   1 +
 .../dataset-with-meta-record.1.script.aql       |   1 +
 .../dataset-with-meta-record.2.ddl.aql          |  58 +++
 .../dataset-with-meta-record.3.update.aql       |  27 ++
 .../dataset-with-meta-record.4.script.aql       |   1 +
 .../dataset-with-meta-record.5.query.aql        |  29 ++
 .../dataset-with-meta-record.6.script.aql       |   1 +
 .../dataset-with-meta-record.5.adm              |   1 +
 .../create_and_start.sh                         |   1 +
 .../dataset-with-meta-record/stop_and_delete.sh |   3 +
 .../dataset-with-meta-record/stop_and_start.sh  |   2 +
 .../visitor/InlineColumnAliasVisitor.java       | 450 ++++++++++++++++++
 .../SqlppBuiltinFunctionRewriteVisitor.java     |  46 ++
 .../SqlppGlobalAggregationSugarVisitor.java     |  67 +++
 .../visitor/SqlppGroupBySugarVisitor.java       | 123 +++++
 .../rewrites/visitor/SqlppGroupByVisitor.java   | 168 +++++++
 .../visitor/SqlppInlineUdfsVisitor.java         | 237 ++++++++++
 .../visitor/VariableCheckAndRewriteVisitor.java | 102 ++++
 .../lang/sqlpp/util/FunctionMapUtil.java        | 150 ++++++
 .../visitor/CheckSql92AggregateVisitor.java     | 265 +++++++++++
 .../lang/sqlpp/visitor/DeepCopyVisitor.java     | 415 ++++++++++++++++
 .../lang/sqlpp/visitor/FreeVariableVisitor.java | 471 +++++++++++++++++++
 .../AbstractSqlppExpressionScopingVisitor.java  | 284 +++++++++++
 .../AbstractSqlppSimpleExpressionVisitor.java   | 347 ++++++++++++++
 164 files changed, 5396 insertions(+), 5396 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql
deleted file mode 100644
index 21c8ac6..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create an adapter that uses external parser to parse data from files
- * Expected Res : Success
- * Date         : Feb, 09, 2016
- */
-
-drop dataverse externallibtest if exists;
-create dataverse externallibtest;
-use dataverse externallibtest;
-
-create type Classad as open {
-GlobalJobId: string
-};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql
deleted file mode 100644
index 0290611..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql
deleted file mode 100644
index 9a7f043..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use dataverse externallibtest;
-
-create external dataset Condor(Classad) using localfs(
-("path"="asterix_nc1://data/external-parser/jobads.new"),
-("format"="semi-structured"),
-("record-start"="["),
-("record-end"="]"),
-("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql
deleted file mode 100644
index 9d5d499..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-use dataverse externallibtest;
-
-for $x in dataset Condor
-order by $x.GlobalJobId
-return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql
deleted file mode 100644
index 86af80f..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.5.lib.aql
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql
deleted file mode 100644
index 21c8ac6..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.1.ddl.aql
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create an adapter that uses external parser to parse data from files
- * Expected Res : Success
- * Date         : Feb, 09, 2016
- */
-
-drop dataverse externallibtest if exists;
-create dataverse externallibtest;
-use dataverse externallibtest;
-
-create type Classad as open {
-GlobalJobId: string
-};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql
deleted file mode 100644
index 0290611..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.2.lib.aql
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql
deleted file mode 100644
index 5b2d50c..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.3.ddl.aql
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use dataverse externallibtest;
-
-create external dataset Condor(Classad) using localfs(
-("path"="asterix_nc1://data/external-parser/jobads.old"),
-("format"="line-separated"),
-("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql
deleted file mode 100644
index 9d5d499..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.4.query.aql
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-use dataverse externallibtest;
-
-for $x in dataset Condor
-order by $x.GlobalJobId
-return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql
deleted file mode 100644
index 86af80f..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-old/classad-parser-old.5.lib.aql
+++ /dev/null
@@ -1,19 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql
deleted file mode 100644
index 7c668e4..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.1.ddl.aql
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/**
- * Testing an external dataset with invalid adapter format parameter value
- * Expected result: fail - Unknown data format.
- */
-
-drop dataverse temp if exists;
-create dataverse temp
-use dataverse temp;
-
-create type test as closed {
-  id: int32
-};
-
-create external dataset testds (test) using localfs(
-("path"="asterix_nc1://data/csv/sample_04_quote_error.csv"),
-("format"="add"));
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql b/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql
deleted file mode 100644
index 438e0b6..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/external/invalid-format/invalid-format.2.query.aql
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/**
- * Testing an external dataset with invalid adapter format parameter value
- * Expected result: fail - Unknown data format.
- */
-
-use dataverse temp;
-
-for $i in dataset testds
-return $i;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql
deleted file mode 100644
index d7827c5..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.1.ddl.aql
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create a twitter feed with missing parameters
- * Expected Res : Failure
- */
-
-drop dataverse feeds if exists;
-create dataverse feeds;
-use dataverse feeds;
-
-create type TwitterUser if not exists as open{
-screen_name: string,
-language: string,
-friends_count: int32,
-status_count: int32,
-name: string,
-followers_count: int32
-};
-
-create type Tweet if not exists as open{
-id: string,
-user: TwitterUser,
-latitude:double,
-longitude:double,
-created_at:string,
-message_text:string
-};
-
-create dataset Tweets (Tweet)
-primary key id;
-
-create feed TwitterFeed using push_twitter(
-("type-name"="Tweet"),
-("format"="twitter-status"),
-("consumer.key"="************"),
-("access.token"="**********"),
-("access.token.secret"="*************"));
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql
deleted file mode 100644
index 6712969..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/twitter-feed/twitter-feed.2.update.aql
+++ /dev/null
@@ -1,25 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create a twitter feed with missing parameters
- * Expected Res : Failure
- */
-
-use dataverse feeds;
-connect feed TwitterFeed to dataset Tweets;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql
deleted file mode 100644
index 37a8f14..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.1.ddl.aql
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/**
- * Test loading from a file that does not exist.
- * Expected result: fail - File not found.
- */
-
-drop dataverse broken if exists;
-create dataverse broken;
-use dataverse broken;
-
-create type xtype as closed { id: int32 };
-create dataset X(xtype) primary key id;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql
deleted file mode 100644
index c26ffd5..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/load/file-not-found/file-not-found.2.update.aql
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/**
- * Test loading from a file that does not exist.
- * Expected result: fail - File not found.
- */
-
-use dataverse broken;
-
-load dataset X using localfs(
-  ("path"="asterix_nc1://bla"),
-  ("format"="delimited-text"),
-  ("delimiter"="|")
-);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql
deleted file mode 100644
index 671e5a2..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.1.ddl.aql
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Test case Name  : temp_primary_plus_ngram_flush.aql
- * Description     : Check that flush for temporary datasets with ngram indexes succeeds.
- * Expected Result : Success
- * Date            : Apr 4 2016
- */
-
-drop dataverse recovery if exists;
-create dataverse recovery;
-use dataverse recovery;
-
-/* For raw Fragile data */
-create type FragileTypeRaw as closed {
-row_id: int32,
-sid: int32,
-date: string,
-day: int32,
-time: string,
-bpm: int32,
-RR: float,
-text: string,
-location: point,
-text2: string
-};
-
-/* For cleaned Fragile data */
-create type FragileType as closed {
-row_id: int32,
-sid: int32,
-date: date,
-day: int32,
-time: time,
-bpm: int32,
-RR: float,
-text: string,
-location: point,
-text2: string
-};
-
-/* Create dataset for loading raw Fragile data */
-create temporary dataset Fragile_raw (FragileTypeRaw)
-primary key row_id;
-
-/* Create dataset for cleaned Fragile data */
-create temporary dataset Fragile (FragileType)
-primary key row_id;
-
-/* Create default secondary index on dataset clean Fragile */
-create index cfText2Ix on Fragile(text2) type ngram(3);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql
deleted file mode 100644
index 1b3cbd3..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.2.update.aql
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Test case Name  : temp_primary_plus_ngram_flush.aql
- * Description     : Check that flush for temporary datasets with ngram indexes succeeds.
- * Expected Result : Success
- * Date            : Apr 4 2016
- */
-
-use dataverse recovery;
-
-load dataset Fragile_raw using localfs
-(("path"="asterix_nc1://data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
-
-/* Load Fragile data from raw dataset into cleaned dataset */
-insert into dataset Fragile (
-for $t in dataset Fragile_raw
-where $t.row_id <= 1000
-return {
-"row_id": $t.row_id,
-"sid": $t.sid,
-"date": date($t.date),
-"day": $t.day,
-"time": parse-time($t.time, "h:m:s"),
-"bpm": $t.bpm,
-"RR": $t.RR,
-"text": $t.text,
-"location": $t.location,
-"text2": $t.text2}
-);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql
deleted file mode 100644
index 67c41d6..0000000
--- a/asterix-app/src/test/resources/runtimets/queries/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.query.aql
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Test case Name  : temp_primary_plus_ngram_flush.aql
- * Description     : Check that flush for temporary datasets with ngram indexes succeeds.
- * Expected Result : Success
- * Date            : Apr 4 2016
- */
-
-use dataverse recovery;
-
-count (for $x in dataset Fragile
-where contains($x.text2, "location") return $x);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp
deleted file mode 100644
index 6664c91..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q01/q01.3.query.sqlpp
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-SELECT count(u."friend-ids") count
-FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp
deleted file mode 100644
index 1204809..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q02/q02.3.query.sqlpp
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-SELECT 1 foo, COUNT(u."friend-ids") count
-FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp
deleted file mode 100644
index c0bf442..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q03/q03.3.query.sqlpp
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-SELECT COUNT(1) count
-FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp
deleted file mode 100644
index 4d1b4d5..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q04/q04.3.query.sqlpp
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-SELECT COUNT([1,2,3]) count
-FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp
deleted file mode 100644
index 7922734..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q05_error/q05_error.3.query.sqlpp
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-SELECT u.name name, COUNT(u."friend-ids") count
-FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp
deleted file mode 100644
index 907afb0..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q06_error/q06_error.3.query.sqlpp
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-SELECT COLL_COUNT(u.name) count
-FROM FacebookUsers u;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp
deleted file mode 100644
index 67b3d68..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q07_error/q07_error.3.query.sqlpp
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-COUNT(
-  ( SELECT u.name count
-    FROM FacebookUsers u
-  )
-);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp
deleted file mode 100644
index a7c021b..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.1.ddl.sqlpp
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-drop  database TinySocial if exists;
-create  database TinySocial;
-
-use TinySocial;
-
-
-create type TinySocial.TwitterUserType as
-{
-  "screen-name" : string
-}
-
-create type TinySocial.TweetMessageType as {
-  tweetid : string
-}
-
-create type TinySocial.FacebookUserType as
- open {
-  id : int64
-}
-
-create type TinySocial.FacebookMessageType as
- open {
-  "message-id" : int64
-}
-
-create  table FacebookUsers(FacebookUserType) primary key id;
-create  table FacebookMessages(FacebookMessageType) primary key "message-id";
-create  table TwitterUsers(TwitterUserType) primary key "screen-name";
-create  table TweetMessages(TweetMessageType) primary key tweetid hints ("CARDINALITY"="100");
-create  index fbUserSinceIdx  on FacebookUsers ("user-since":datetime) type btree enforced;
-create  index fbAuthorIdx  on FacebookMessages ("author-id":int64) type btree enforced;
-



[20/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
index 3a5140c,0000000..43c754b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite.xml
@@@ -1,7028 -1,0 +1,7053 @@@
 +<!--
 + ! Licensed to the Apache Software Foundation (ASF) under one
 + ! or more contributor license agreements.  See the NOTICE file
 + ! distributed with this work for additional information
 + ! regarding copyright ownership.  The ASF licenses this file
 + ! to you under the Apache License, Version 2.0 (the
 + ! "License"); you may not use this file except in compliance
 + ! with the License.  You may obtain a copy of the License at
 + !
 + !   http://www.apache.org/licenses/LICENSE-2.0
 + !
 + ! Unless required by applicable law or agreed to in writing,
 + ! software distributed under the License is distributed on an
 + ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + ! KIND, either express or implied.  See the License for the
 + ! specific language governing permissions and limitations
 + ! under the License.
 + !-->
 +<!DOCTYPE test-suite [
 +
 +<!ENTITY ComparisonQueries SYSTEM "queries/comparison/ComparisonQueries.xml">
 +<!ENTITY RecordsQueries SYSTEM "queries/records/RecordsQueries.xml">
 +<!ENTITY DeepEqualQueries SYSTEM "queries/comparison/deep_equal/DeepEqualQueries.xml">
 +]>
 +
 +<test-suite
 +        xmlns="urn:xml.testframework.asterix.apache.org"
 +        ResultOffsetPath="results"
 +        QueryOffsetPath="queries"
 +        QueryFileExtension=".aql">
++    <test-group name="external">
++        <test-case FilePath="external">
++            <compilation-unit name="invalid-format">
++                <output-dir compare="Text">invalid-format</output-dir>
++                <expected-error>Unknown format</expected-error>
++            </compilation-unit>
++        </test-case>
++    </test-group>
++    <test-group name="external-indexing">
++        <test-case FilePath="external-indexing">
++            <compilation-unit name="text-format">
++                <output-dir compare="Text">text-format</output-dir>
++            </compilation-unit>
++        </test-case>
++        <test-case FilePath="external-indexing">
++            <compilation-unit name="sequence-format">
++                <output-dir compare="Text">sequence-format</output-dir>
++            </compilation-unit>
++        </test-case>
++        <test-case FilePath="external-indexing">
++            <compilation-unit name="rc-format">
++                <output-dir compare="Text">rc-format</output-dir>
++            </compilation-unit>
++        </test-case>
++        <test-case FilePath="external-indexing">
++            <compilation-unit name="rtree-index">
++                <output-dir compare="Text">rtree-index</output-dir>
++            </compilation-unit>
++        </test-case>
++        <test-case FilePath="external-indexing">
++            <compilation-unit name="leftouterjoin">
++                <output-dir compare="Text">leftouterjoin</output-dir>
++            </compilation-unit>
++        </test-case>
++        <test-case FilePath="external-indexing">
++            <compilation-unit name="leftouterjoin-rtree">
++                <output-dir compare="Text">leftouterjoin-rtree</output-dir>
++            </compilation-unit>
++        </test-case>
++    </test-group>
 +    <test-group name="external-library">
 +        <test-case FilePath="external-library">
 +            <compilation-unit name="typed_adapter">
 +                <output-dir compare="Text">typed_adapter</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="external-library">
-             <compilation-unit name="classad-parser">
-                 <output-dir compare="Text">classad-parser</output-dir>
++            <compilation-unit name="classad-parser-new">
++                <output-dir compare="Text">classad-parser-new</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="external-library">
-             <compilation-unit name="classad-parser2">
-                 <output-dir compare="Text">classad-parser2</output-dir>
++            <compilation-unit name="classad-parser-old">
++                <output-dir compare="Text">classad-parser-old</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="external-library">
 +            <compilation-unit name="getCapital">
 +                <output-dir compare="Text">getCapital</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="feeds">
 +        <test-case FilePath="feeds">
++            <compilation-unit name="twitter-feed">
++                <output-dir compare="Text">twitter-feed</output-dir>
++                <expected-error>One or more parameters are missing from adapter configuration</expected-error>
++            </compilation-unit>
++        </test-case>
++        <test-case FilePath="feeds">
 +            <compilation-unit name="feed-with-external-parser">
 +                <output-dir compare="Text">feed-with-external-parser</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feed-with-external-function">
 +                <output-dir compare="Text">feed-with-external-function</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="change-feed-with-meta-pk-in-meta">
 +                <output-dir compare="Text">change-feed-with-meta-pk-in-meta</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feed-with-filtered-dataset">
 +                <output-dir compare="Text">feed-with-filtered-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="change-feed">
 +                <output-dir compare="Text">change-feed</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feed-with-meta-pk-in-meta">
 +                <output-dir compare="Text">feed-with-meta-pk-in-meta</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_07">
 +                <output-dir compare="Text">feeds_07</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!-- Fails constantly and not clear what is intended
 +        <test-case FilePath="feeds">
 +          <compilation-unit name="feeds_06">
 +            <output-dir compare="Text">feeds_06</output-dir>
 +          </compilation-unit>
 +        </test-case> -->
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="drop-dataverse-with-disconnected-feed">
 +                <output-dir compare="Text">drop-dataverse-with-disconnected-feed</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feed-push-socket">
 +                <output-dir compare="Text">feed-push-socket</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_01">
 +                <output-dir compare="Text">feeds_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_02">
 +                <output-dir compare="Text">feeds_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_03">
 +                <output-dir compare="Text">feeds_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!-- Fails constantly
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_04">
 +                <output-dir compare="Text">feeds_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +         -->
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_08">
 +                <output-dir compare="Text">feeds_08</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_09">
 +                <output-dir compare="Text">feeds_09</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_10">
 +                <output-dir compare="Text">feeds_10</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_11">
 +                <output-dir compare="Text">feeds_11</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="feeds_12">
 +                <output-dir compare="Text">feeds_12</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +
 +        <test-case FilePath="feeds">
 +            <compilation-unit name="issue_230_feeds">
 +                <output-dir compare="Text">issue_230_feeds</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--<test-case FilePath="feeds">
 +            <compilation-unit name="issue_711_feeds">
 +                <output-dir compare="Text">issue_711_feeds</output-dir>
 +            </compilation-unit>
 +        </test-case>-->
 +    </test-group>
 +    <test-group name="upsert">
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="filtered-dataset">
 +                <output-dir compare="Text">filtered-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="nested-index">
 +                <output-dir compare="Text">nested-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="primary-secondary-rtree">
 +                <output-dir compare="Text">primary-secondary-rtree</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="upsert-with-self-read">
 +                <output-dir compare="Text">upsert-with-self-read</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="nullable-index">
 +                <output-dir compare="Text">nullable-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="open-index">
 +                <output-dir compare="Text">open-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="primary-index">
 +                <output-dir compare="Text">primary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="primary-secondary-btree">
 +                <output-dir compare="Text">primary-secondary-btree</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="primary-secondary-inverted">
 +                <output-dir compare="Text">primary-secondary-inverted</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="upsert">
 +            <compilation-unit name="multiple-secondaries">
 +                <output-dir compare="Text">multiple-secondaries</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="flwor">
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at00">
 +                <output-dir compare="Text">at00</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at01">
 +                <output-dir compare="Text">at01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at02">
 +                <output-dir compare="Text">at02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at03">
 +                <output-dir compare="Text">at03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at04">
 +                <output-dir compare="Text">at04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at05">
 +                <output-dir compare="Text">at05</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at06">
 +                <output-dir compare="Text">at06</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="query-issue550">
 +                <output-dir compare="Text">query-issue550</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="union">
 +        <test-case FilePath="union">
 +            <compilation-unit name="union">
 +                <output-dir compare="Text">union</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="union">
 +            <compilation-unit name="union2">
 +                <output-dir compare="Text">union2</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="union">
 +            <compilation-unit name="query-ASTERIXDB-300">
 +                <output-dir compare="Text">query-ASTERIXDB-300</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="union">
 +            <compilation-unit name="query-ASTERIXDB-1205">
 +                <output-dir compare="Text">query-ASTERIXDB-1205</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="union">
 +            <compilation-unit name="query-ASTERIXDB-1205-2">
 +                <output-dir compare="Text">query-ASTERIXDB-1205-2</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="union">
 +            <compilation-unit name="query-ASTERIXDB-1205-3">
 +                <output-dir compare="Text">query-ASTERIXDB-1205-3</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="union">
 +            <compilation-unit name="query-ASTERIXDB-1047">
 +                <output-dir compare="Text">query-ASTERIXDB-1047</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="aggregate">
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue531_string_min_max">
 +                <output-dir compare="Text">issue531_string_min_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_null">
 +                <output-dir compare="Text">agg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_null_rec">
 +                <output-dir compare="Text">agg_null_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_null_rec_1">
 +                <output-dir compare="Text">agg_null_rec_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_number_rec">
 +                <output-dir compare="Text">agg_number_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_mixed">
 +                <output-dir compare="Text">avg_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_mixed">
 +                <output-dir compare="Text">sum_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="min_mixed">
 +                <output-dir compare="Text">min_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_number">
 +                <output-dir compare="Text">agg_number</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_min_hetero_list_1">
 +                <output-dir compare="Text">issue425_min_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_min_hetero_list">
 +                <output-dir compare="Text">issue425_min_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_sum_hetero_list_1">
 +                <output-dir compare="Text">issue425_sum_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_sum_hetero_list">
 +                <output-dir compare="Text">issue425_sum_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="query-issue400">
 +                <output-dir compare="Text">query-issue400</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue395">
 +                <output-dir compare="Text">issue395</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue412_0">
 +                <output-dir compare="Text">issue412_0</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue412_1">
 +                <output-dir compare="Text">issue412_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_double">
 +                <output-dir compare="Text">avg_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_double_null">
 +                <output-dir compare="Text">avg_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_empty_01">
 +                <output-dir compare="Text">avg_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_empty_02">
 +                <output-dir compare="Text">avg_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_float">
 +                <output-dir compare="Text">avg_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_float_null">
 +                <output-dir compare="Text">avg_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int16">
 +                <output-dir compare="Text">avg_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int16_null">
 +                <output-dir compare="Text">avg_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int32">
 +                <output-dir compare="Text">avg_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int32_null">
 +                <output-dir compare="Text">avg_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int64">
 +                <output-dir compare="Text">avg_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int64_null">
 +                <output-dir compare="Text">avg_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int8">
 +                <output-dir compare="Text">avg_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int8_null">
 +                <output-dir compare="Text">avg_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_01">
 +                <output-dir compare="Text">count_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_empty_01">
 +                <output-dir compare="Text">count_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_empty_02">
 +                <output-dir compare="Text">count_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_null">
 +                <output-dir compare="Text">count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
+        <test-case FilePath="aggregate">
 +            <compilation-unit name="query-ASTERIXDB-923">
 +                <output-dir compare="Text">query-ASTERIXDB-923</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="group_only">
 +                <output-dir compare="Text">group_only</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="droptype">
 +            <output-dir compare="Text">droptype</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
+        <!-- TODO(madhusudancs): These tests, which exercise the local_<agg>/global_<agg> functions, should be removed.
+        Before doing so, we should modify the code to ensure those built-in functions remain defined but are no longer
+        exposed by AQL; until then, these test cases are left commented out.
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="global-avg_01">
 +            <output-dir compare="Text">global-avg_01</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="global-avg_null">
 +            <output-dir compare="Text">global-avg_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_double">
 +            <output-dir compare="Text">local-avg_double</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_double_null">
 +            <output-dir compare="Text">local-avg_double_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_float">
 +            <output-dir compare="Text">local-avg_float</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_float_null">
 +            <output-dir compare="Text">local-avg_float_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int16">
 +            <output-dir compare="Text">local-avg_int16</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int16_null">
 +            <output-dir compare="Text">local-avg_int16_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int32">
 +            <output-dir compare="Text">local-avg_int32</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int32_null">
 +            <output-dir compare="Text">local-avg_int32_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int64">
 +            <output-dir compare="Text">local-avg_int64</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int64_null">
 +            <output-dir compare="Text">local-avg_int64_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int8">
 +            <output-dir compare="Text">local-avg_int8</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int8_null">
 +            <output-dir compare="Text">local-avg_int8_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="max_empty_01">
 +                <output-dir compare="Text">max_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="max_empty_02">
 +                <output-dir compare="Text">max_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="min_empty_01">
 +                <output-dir compare="Text">min_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="min_empty_02">
 +                <output-dir compare="Text">min_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_avg">
 +                <output-dir compare="Text">scalar_avg</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_avg_empty">
 +                <output-dir compare="Text">scalar_avg_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_avg_null">
 +                <output-dir compare="Text">scalar_avg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_count">
 +                <output-dir compare="Text">scalar_count</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_count_empty">
 +                <output-dir compare="Text">scalar_count_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_count_null">
 +                <output-dir compare="Text">scalar_count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_max">
 +                <output-dir compare="Text">scalar_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_max_empty">
 +                <output-dir compare="Text">scalar_max_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_max_null">
 +                <output-dir compare="Text">scalar_max_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_min">
 +                <output-dir compare="Text">scalar_min</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_min_empty">
 +                <output-dir compare="Text">scalar_min_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_min_null">
 +                <output-dir compare="Text">scalar_min_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_sum">
 +                <output-dir compare="Text">scalar_sum</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_sum_empty">
 +                <output-dir compare="Text">scalar_sum_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_sum_null">
 +                <output-dir compare="Text">scalar_sum_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_double">
 +                <output-dir compare="Text">sum_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_double_null">
 +                <output-dir compare="Text">sum_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_empty_01">
 +                <output-dir compare="Text">sum_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_empty_02">
 +                <output-dir compare="Text">sum_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_float">
 +                <output-dir compare="Text">sum_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_float_null">
 +                <output-dir compare="Text">sum_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int16">
 +                <output-dir compare="Text">sum_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int16_null">
 +                <output-dir compare="Text">sum_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int32">
 +                <output-dir compare="Text">sum_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int32_null">
 +                <output-dir compare="Text">sum_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int64">
 +                <output-dir compare="Text">sum_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int64_null">
 +                <output-dir compare="Text">sum_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int8">
 +                <output-dir compare="Text">sum_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int8_null">
 +                <output-dir compare="Text">sum_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_null-with-pred">
 +                <output-dir compare="Text">sum_null-with-pred</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_numeric_null">
 +                <output-dir compare="Text">sum_numeric_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="aggregate-sql">
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue531_string_min_max">
 +                <output-dir compare="Text">issue531_string_min_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_null">
 +                <output-dir compare="Text">agg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_null_rec">
 +                <output-dir compare="Text">agg_null_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_null_rec_1">
 +                <output-dir compare="Text">agg_null_rec_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_number_rec">
 +                <output-dir compare="Text">agg_number_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_mixed">
 +                <output-dir compare="Text">avg_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_mixed">
 +                <output-dir compare="Text">sum_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="min_mixed">
 +                <output-dir compare="Text">min_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_number">
 +                <output-dir compare="Text">agg_number</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_min_hetero_list_1">
 +                <output-dir compare="Text">issue425_min_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_min_hetero_list">
 +                <output-dir compare="Text">issue425_min_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_sum_hetero_list_1">
 +                <output-dir compare="Text">issue425_sum_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_sum_hetero_list">
 +                <output-dir compare="Text">issue425_sum_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="query-issue400">
 +                <output-dir compare="Text">query-issue400</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue395">
 +                <output-dir compare="Text">issue395</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue412_0">
 +                <output-dir compare="Text">issue412_0</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue412_1">
 +                <output-dir compare="Text">issue412_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_double">
 +                <output-dir compare="Text">avg_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_double_null">
 +                <output-dir compare="Text">avg_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_empty_01">
 +                <output-dir compare="Text">avg_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_empty_02">
 +                <output-dir compare="Text">avg_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_float">
 +                <output-dir compare="Text">avg_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_float_null">
 +                <output-dir compare="Text">avg_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int16">
 +                <output-dir compare="Text">avg_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int16_null">
 +                <output-dir compare="Text">avg_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int32">
 +                <output-dir compare="Text">avg_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int32_null">
 +                <output-dir compare="Text">avg_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int64">
 +                <output-dir compare="Text">avg_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int64_null">
 +                <output-dir compare="Text">avg_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int8">
 +                <output-dir compare="Text">avg_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int8_null">
 +                <output-dir compare="Text">avg_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_01">
 +                <output-dir compare="Text">count_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_empty_01">
 +                <output-dir compare="Text">count_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_empty_02">
 +                <output-dir compare="Text">count_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_null">
 +                <output-dir compare="Text">count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="max_empty_01">
 +                <output-dir compare="Text">max_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="max_empty_02">
 +                <output-dir compare="Text">max_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="min_empty_01">
 +                <output-dir compare="Text">min_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="min_empty_02">
 +                <output-dir compare="Text">min_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_avg">
 +                <output-dir compare="Text">scalar_avg</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_avg_empty">
 +                <output-dir compare="Text">scalar_avg_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_avg_null">
 +                <output-dir compare="Text">scalar_avg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_count">
 +                <output-dir compare="Text">scalar_count</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_count_empty">
 +                <output-dir compare="Text">scalar_count_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_count_null">
 +                <output-dir compare="Text">scalar_count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_max">
 +                <output-dir compare="Text">scalar_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_max_empty">
 +                <output-dir compare="Text">scalar_max_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_max_null">
 +                <output-dir compare="Text">scalar_max_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_min">
 +                <output-dir compare="Text">scalar_min</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_min_empty">
 +                <output-dir compare="Text">scalar_min_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_min_null">
 +                <output-dir compare="Text">scalar_min_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_sum">
 +                <output-dir compare="Text">scalar_sum</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_sum_empty">
 +                <output-dir compare="Text">scalar_sum_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_sum_null">
 +                <output-dir compare="Text">scalar_sum_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_double">
 +                <output-dir compare="Text">sum_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_double_null">
 +                <output-dir compare="Text">sum_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_empty_01">
 +                <output-dir compare="Text">sum_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_empty_02">
 +                <output-dir compare="Text">sum_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_float">
 +                <output-dir compare="Text">sum_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_float_null">
 +                <output-dir compare="Text">sum_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int16">
 +                <output-dir compare="Text">sum_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int16_null">
 +                <output-dir compare="Text">sum_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int32">
 +                <output-dir compare="Text">sum_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int32_null">
 +                <output-dir compare="Text">sum_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int64">
 +                <output-dir compare="Text">sum_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int64_null">
 +                <output-dir compare="Text">sum_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int8">
 +                <output-dir compare="Text">sum_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int8_null">
 +                <output-dir compare="Text">sum_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_null-with-pred">
 +                <output-dir compare="Text">sum_null-with-pred</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_numeric_null">
 +                <output-dir compare="Text">sum_numeric_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="boolean">
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="and_01">
 +                <output-dir compare="Text">and_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="and_null">
 +                <output-dir compare="Text">and_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="and_null_false">
 +                <output-dir compare="Text">and_null_false</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="not_01">
 +                <output-dir compare="Text">not_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="comparison">
 +        &ComparisonQueries;
 +    </test-group>
 +    <test-group name="constructor">
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="binary_01">
 +                <output-dir compare="Text">binary_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="add-null">
 +                <output-dir compare="Text">add-null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="boolean_01">
 +                <output-dir compare="Text">boolean_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="circle_01">
 +                <output-dir compare="Text">circle_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="date_01">
 +                <output-dir compare="Text">date_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="datetime_01">
 +                <output-dir compare="Text">datetime_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="double_01">
 +                <output-dir compare="Text">double_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="duration_01">
 +                <output-dir compare="Text">duration_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="duration_02">
 +                <output-dir compare="Text">duration_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="float_01">
 +                <output-dir compare="Text">float_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="int_01">
 +                <output-dir compare="Text">int_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="interval">
 +                <output-dir compare="Text">interval</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="line_01">
 +                <output-dir compare="Text">line_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="rectangle_01">
 +                <output-dir compare="Text">rectangle_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="point_01">
 +                <output-dir compare="Text">point_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="polygon_01">
 +                <output-dir compare="Text">polygon_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-01">
 +                <output-dir compare="Text">primitive-01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-02">
 +                <output-dir compare="Text">primitive-02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-03">
 +                <output-dir compare="Text">primitive-03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-04">
 +                <output-dir compare="Text">primitive-04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="string_01">
 +                <output-dir compare="Text">string_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="time_01">
 +                <output-dir compare="Text">time_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="uuid_01">
 +                <output-dir compare="Text">uuid_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="custord">
 +        <!--
 +    <test-case FilePath="custord">
 +      <compilation-unit name="co">
 +        <output-dir compare="Text">co</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +    -->
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_01">
 +                <output-dir compare="Text">customer_q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_02">
 +                <output-dir compare="Text">customer_q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_03">
 +                <output-dir compare="Text">customer_q_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_04">
 +                <output-dir compare="Text">customer_q_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_05">
 +                <output-dir compare="Text">customer_q_05</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_06">
 +                <output-dir compare="Text">customer_q_06</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_07">
 +                <output-dir compare="Text">customer_q_07</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_08">
 +                <output-dir compare="Text">customer_q_08</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="custord">
 +          <compilation-unit name="denorm-cust-order_01">
 +            <output-dir compare="Text">denorm-cust-order_01</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="custord">
 +            <compilation-unit name="denorm-cust-order_02">
 +                <output-dir compare="Text">denorm-cust-order_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="custord">
 +          <compilation-unit name="denorm-cust-order_03">
 +            <output-dir compare="Text">denorm-cust-order_03</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <!--
 +        <test-case FilePath="custord">
 +          <compilation-unit name="freq-clerk">
 +            <output-dir compare="Text">freq-clerk</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_01">
 +                <output-dir compare="Text">join_q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_02">
 +                <output-dir compare="Text">join_q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_03">
 +                <output-dir compare="Text">join_q_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_04">
 +                <output-dir compare="Text">join_q_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="load-test">
 +                <output-dir compare="Text">load-test</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_01">
 +                <output-dir compare="Text">order_q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_02">
 +                <output-dir compare="Text">order_q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_03">
 +                <output-dir compare="Text">order_q_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_04">
 +                <output-dir compare="Text">order_q_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_05">
 +                <output-dir compare="Text">order_q_05</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_06">
 +                <output-dir compare="Text">order_q_06</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="dapd">
 +        <test-case FilePath="dapd">
 +            <compilation-unit name="q1">
 +                <output-dir compare="Text">q1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dapd">
 +            <compilation-unit name="q2">
 +                <output-dir compare="Text">q2</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="dapd">
 +          <compilation-unit name="q3">
 +            <output-dir compare="Text">q3</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +    </test-group>
 +    <test-group name="dml">
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-dataset-with-meta">
 +                <output-dir compare="Text">insert-dataset-with-meta</output-dir>
 +                <expected-error>insert into dataset is not supported on Datasets with Meta records</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-dataset-with-meta">
 +                <output-dir compare="Text">delete-dataset-with-meta</output-dir>
 +                <expected-error>delete from dataset is not supported on Datasets with Meta records</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="upsert-dataset-with-meta">
 +                <output-dir compare="Text">upsert-dataset-with-meta</output-dir>
 +                <expected-error>upsert into dataset is not supported on Datasets with Meta records</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-ngram-index">
 +                <output-dir compare="Text">load-with-ngram-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-duplicated-keys-from-query">
 +                <output-dir compare="Text">insert-duplicated-keys-from-query</output-dir>
 +                <expected-error>org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException: Failed to insert key since key already exists</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="compact-dataset-and-its-indexes">
 +                <output-dir compare="Text">compact-dataset-and-its-indexes</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-constant-merge-policy">
 +                <output-dir compare="Text">using-constant-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-prefix-merge-policy">
 +                <output-dir compare="Text">using-prefix-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-correlated-prefix-merge-policy">
 +                <output-dir compare="Text">using-correlated-prefix-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-no-merge-policy">
 +                <output-dir compare="Text">using-no-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue382">
 +                <output-dir compare="Text">query-issue382</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue433">
 +                <output-dir compare="Text">query-issue433</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue288">
 +                <output-dir compare="Text">query-issue288</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue205">
 +                <output-dir compare="Text">query-issue205</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-from-loaded-dataset-with-index">
 +                <output-dir compare="Text">delete-from-loaded-dataset-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-from-loaded-dataset">
 +                <output-dir compare="Text">delete-from-loaded-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-syntax-change">
 +                <output-dir compare="Text">delete-syntax-change</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="drop-empty-secondary-indexes">
 +                <output-dir compare="Text">drop-empty-secondary-indexes</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="drop-index">
 +                <output-dir compare="Text">drop-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="create-drop-cltype">
 +                <output-dir compare="Text">create-drop-cltype</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="create-drop-opntype">
 +                <output-dir compare="Text">create-drop-opntype</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="empty-load-with-index">
 +                <output-dir compare="Text">empty-load-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-empty-dataset">
 +                <output-dir compare="Text">insert-into-empty-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-empty-dataset-with-index">
 +                <output-dir compare="Text">insert-into-empty-dataset-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-syntax">
 +                <output-dir compare="Text">insert-syntax</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-and-scan-dataset">
 +                <output-dir compare="Text">insert-and-scan-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-and-scan-dataset-with-index">
 +                <output-dir compare="Text">insert-and-scan-dataset-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-and-scan-joined-datasets">
 +                <output-dir compare="Text">insert-and-scan-joined-datasets</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset-with-index_01">
 +                <output-dir compare="Text">insert-into-loaded-dataset-with-index_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset-with-index_02">
 +                <output-dir compare="Text">insert-into-loaded-dataset-with-index_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset_01">
 +                <output-dir compare="Text">insert-into-loaded-dataset_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset_02">
 +                <output-dir compare="Text">insert-into-loaded-dataset_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-src-dst-01">
 +                <output-dir compare="Text">insert-src-dst-01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert">
 +                <output-dir compare="Text">insert</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-duplicated-keys">
 +                <output-dir compare="Text">insert-duplicated-keys</output-dir>
 +                <expected-error>org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException: Failed to insert key since key already exists</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert_less_nc">
 +                <output-dir compare="Text">insert_less_nc</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="dml">
 +          <compilation-unit name="load-from-hdfs">
 +            <output-dir compare="Text">load-from-hdfs</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-with-autogenerated-pk_adm_01">
 +                <output-dir compare="Text">insert-with-autogenerated-pk_adm_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-with-autogenerated-pk_adm_02">
 +                <output-dir compare="Text">insert-with-autogenerated-pk_adm_02</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Duplicate field id encountered</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-with-autogenerated-pk_adm_03">
 +                <output-dir compare="Text">insert-with-autogenerated-pk_adm_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_txt_01">
 +                <output-dir compare="Text">load-with-autogenerated-pk_txt_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_01">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_02">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_02</output-dir>
 +                <expected-error>org.apache.asterix.external.parser.ADMDataParser$ParseException</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_03">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_03</output-dir>
 +                <expected-error>org.apache.asterix.external.parser.ADMDataParser$ParseException</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_04">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_csv_01">
 +                <output-dir compare="Text">load-with-autogenerated-pk_csv_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_csv_02"><!-- Right now, this only throw exception on expected result!!! -->
 +                <output-dir compare="Text">load-with-autogenerated-pk_csv_02</output-dir>
 +                <expected-error>java.lang.Exception: Result</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-index">
 +                <output-dir compare="Text">load-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-rtree-index">
 +                <output-dir compare="Text">load-with-rtree-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-word-index">
 +                <output-dir compare="Text">load-with-word-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-c2o-recursive">
 +                <output-dir compare="Text">opentype-c2o-recursive</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-c2o">
 +                <output-dir compare="Text">opentype-c2o</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-closed-optional">
 +                <output-dir compare="Text">opentype-closed-optional</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-insert">
 +                <output-dir compare="Text">opentype-insert</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-insert2">
 +                <output-dir compare="Text">opentype-insert2</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-noexpand">
 +                <output-dir compare="Text">opentype-noexpand</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-o2c-recursive">
 +                <output-dir compare="Text">opentype-o2c-recursive</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-o2c">
 +                <output-dir compare="Text">opentype-o2c</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-o2o">
 +                <output-dir compare="Text">opentype-o2o</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-btree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-btree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-rtree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-rtree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-rtree-secondary-index">
 +                <output-dir compare="Text">scan-delete-rtree-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-btree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-btree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-rtree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-rtree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-rtree-secondary-index">
 +                <output-dir compare="Text">scan-insert-rtree-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-ngram-secondary-index">
 +                <output-dir compare="Text">scan-insert-inverted-index-ngram-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-word-secondary-index">
 +                <output-dir compare="Text">scan-insert-inverted-index-word-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-ngram-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-inverted-index-ngram-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-word-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-inverted-index-word-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-ngram-secondary-index">
 +                <output-dir compare="Text">scan-delete-inverted-index-ngram-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-word-secondary-index">
 +                <output-dir compare="Text">scan-delete-inverted-index-word-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-ngram-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-inverted-index-ngram-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-word-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-inverted-index-word-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-index-open">
 +                <output-dir compare="Text">load-with-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-index-open_02">
 +                <output-dir compare="Text">load-with-index-open_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-ngram-index-open">
 +                <output-dir compare="Text">load-with-ngram-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-rtree-index-open">
 +                <output-dir compare="Text">load-with-rtree-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-word-index-open">
 +                <output-dir compare="Text">load-with-word-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-btree-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-btree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-ngram-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-inverted-index-ngram-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-word-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-inverted-index-word-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-rtree-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-rtree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-btree-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-btree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-ngram-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-inverted-index-ngram-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-word-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-inverted-index-word-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-rtree-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-rtree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-multi-statement">
 +                <output-dir compare="Text">delete-multi-statement</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="employee">
 +        <test-case FilePath="employee">
 +            <compilation-unit name="q_01">
 +                <output-dir compare="Text">q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="employee">
 +            <compilation-unit name="q_02">
 +                <output-dir compare="Text">q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="failure">
 +        <!--
 +        <test-case FilePath="failure">
 +          <compilation-unit name="q1_pricing_summary_report_failure">
 +            <output-dir compare="Text">q1_pricing_summary_report_failure</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +    </test-group>
 +    <!--
 +    <test-group name="flwor">
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for01">
 +          <output-dir co

<TRUNCATED>


[24/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
index a55c74d,0000000..edae05b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
@@@ -1,80 -1,0 +1,80 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp1() {
 +(
 +    select element {'s_acctbal':pssrn.s_acctbal,'s_name':pssrn.s_name,'n_name':pssrn.n_name,'p_partkey':p.p_partkey,'ps_supplycost':pssrn.ps_supplycost,'p_mfgr':p.p_mfgr,'s_address':pssrn.s_address,'s_phone':pssrn.s_phone,'s_comment':pssrn.s_comment}
 +    from  Part as p,
 +          (
 +        select element {'n_name':srn.n_name,'p_partkey':ps.ps_partkey,'ps_supplycost':ps.ps_supplycost,'s_name':srn.s_name,'s_acctbal':srn.s_acctbal,'s_address':srn.s_address,'s_phone':srn.s_phone,'s_comment':srn.s_comment}
 +        from  Partsupp as ps,
 +              (
 +            select element {'s_suppkey':s.s_suppkey,'n_name':rn.n_name,'s_name':s.s_name,'s_acctbal':s.s_acctbal,'s_address':s.s_address,'s_phone':s.s_phone,'s_comment':s.s_comment}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_nationkey':n.n_nationkey,'n_name':n.n_name}
 +                from  Region as r,
 +                      Nation as n
 +                where ((n.n_regionkey = r.r_regionkey) and (r.r_name = 'EUROPE'))
 +            ) as rn
 +            where (s.s_nationkey = rn.n_nationkey)
 +        ) as srn
 +        where (srn.s_suppkey = ps.ps_suppkey)
 +    ) as pssrn
 +    where ((p.p_partkey = pssrn.p_partkey) and tpch.like(p.p_type,'%BRASS'))
 +)
 +};
 +declare function tmp2() {
 +(
-     select element {'p_partkey':p_partkey,'ps_min_supplycost':tpch.min((
++    select element {'p_partkey':p_partkey,'ps_min_supplycost':COLL_MIN((
 +            select element i.ps_supplycost
 +            from  pssrn as i
 +        ))}
 +    from  Part as p,
 +          (
 +        select element {'n_name':srn.n_name,'p_partkey':ps.ps_partkey,'ps_supplycost':ps.ps_supplycost,'s_name':srn.s_name,'s_acctbal':srn.s_acctbal,'s_address':srn.s_address,'s_phone':srn.s_phone,'s_comment':srn.s_comment}
 +        from  Partsupp as ps,
 +              (
 +            select element {'s_suppkey':s.s_suppkey,'n_name':rn.n_name,'s_name':s.s_name,'s_acctbal':s.s_acctbal,'s_address':s.s_address,'s_phone':s.s_phone,'s_comment':s.s_comment}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_nationkey':n.n_nationkey,'n_name':n.n_name}
 +                from  Region as r,
 +                      Nation as n
 +                where ((n.n_regionkey = r.r_regionkey) and (r.r_name = 'EUROPE'))
 +            ) as rn
 +            where (s.s_nationkey = rn.n_nationkey)
 +        ) as srn
 +        where (srn.s_suppkey = ps.ps_suppkey)
 +    ) as pssrn
 +    where ((p.p_partkey = pssrn.p_partkey) and tpch.like(p.p_type,'%BRASS'))
 +    /* +hash */
 +    group by pssrn.p_partkey as p_partkey
 +)
 +};
 +select element {'s_acctbal':t1.s_acctbal,'s_name':t1.s_name,'n_name':t1.n_name,'p_partkey':t1.p_partkey,'p_mfgr':t1.p_mfgr,'s_address':t1.s_address,'s_phone':t1.s_phone,'s_comment':t1.s_comment}
 +from  tpch.tmp2() as t2,
 +      tpch.tmp1() as t1
 +where ((t1.p_partkey = t2.p_partkey) and (t1.ps_supplycost = t2.ps_min_supplycost))
 +order by t1.s_acctbal desc,t1.n_name,t1.s_name,t1.p_partkey
 +limit 100
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
index d30c6d3,0000000..ef0d15d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'l_orderkey':l_orderkey,'revenue':revenue,'o_orderdate':o_orderdate,'o_shippriority':o_shippriority}
 +from  Customer as c,
 +      Orders as o,
 +      LineItem as l
 +where (((c.c_mktsegment = 'BUILDING') and (c.c_custkey = o.o_custkey)) and ((l.l_orderkey = o.o_orderkey) and (o.o_orderdate < '1995-03-15') and (l.l_shipdate > '1995-03-15')))
 +/* +hash */
 +group by l.l_orderkey as l_orderkey,o.o_orderdate as o_orderdate,o.o_shippriority as o_shippriority
- with  revenue as tpch.sum((
++with  revenue as COLL_SUM((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  l as i
 +  ))
 +order by revenue desc,o_orderdate
 +limit 10
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
index 150e4b1,0000000..f30ccbe
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
@@@ -1,54 -1,0 +1,54 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'n_name':n_name,'revenue':revenue}
 +from  Customer as c,
 +      (
 +    select element {'n_name':l1.n_name,'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'s_nationkey':l1.s_nationkey,'o_custkey':o.o_custkey}
 +    from  Orders as o,
 +          (
 +        select element {'n_name':s1.n_name,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_orderkey':l.l_orderkey,'s_nationkey':s1.s_nationkey}
 +        from  LineItem as l,
 +              (
 +            select element {'n_name':n1.n_name,'s_suppkey':s.s_suppkey,'s_nationkey':s.s_nationkey}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_name':n.n_name,'n_nationkey':n.n_nationkey}
 +                from  Nation as n,
 +                      Region as r
 +                where (n.n_regionkey = r.r_regionkey)
 +            ) as n1
 +            where (s.s_nationkey = n1.n_nationkey)
 +        ) as s1
 +        where (l.l_suppkey = s1.s_suppkey)
 +    ) as l1
 +    where ((l1.l_orderkey = o.o_orderkey) and (o.o_orderdate >= '1990-01-01') and (o.o_orderdate < '1995-01-01'))
 +) as o1
 +where ((c.c_nationkey = o1.s_nationkey) and (c.c_custkey = o1.o_custkey))
 +/* +hash */
 +group by o1.n_name as n_name
- with  revenue as tpch.sum((
++with  revenue as COLL_SUM((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  o1 as i
 +  ))
 +order by revenue desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
index 9dee61f,0000000..b960393
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- {'revenue':tpch.sum((
++{'revenue':COLL_SUM((
 +    select element (l.l_extendedprice * l.l_discount)
 +    from  LineItem as l
 +    where ((l.l_shipdate >= '1994-01-01') and (l.l_shipdate < '1995-01-01') and (l.l_discount >= 0.05) and (l.l_discount <= 0.07) and (l.l_quantity < 24))
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
index 74d3b5a,0000000..5ca7eb3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function q7_volume_shipping_tmp() {
 +(
 +    select element {'supp_nation':n1.n_name,'cust_nation':n2.n_name,'s_nationkey':n1.n_nationkey,'c_nationkey':n2.n_nationkey}
 +    from  Nation as n1,
 +          Nation as n2
 +    where ((n2.n_name = 'GERMANY') or (n1.n_name = 'GERMANY'))
 +)
 +};
 +select element {'supp_nation':supp_nation,'cust_nation':cust_nation,'l_year':l_year,'revenue':revenue}
 +from  (
 +    select element {'l_shipdate':loc.l_shipdate,'l_extendedprice':loc.l_extendedprice,'l_discount':loc.l_discount,'c_nationkey':loc.c_nationkey,'s_nationkey':s.s_nationkey}
 +    from  (
 +        select element {'l_shipdate':lo.l_shipdate,'l_extendedprice':lo.l_extendedprice,'l_discount':lo.l_discount,'l_suppkey':lo.l_suppkey,'c_nationkey':c.c_nationkey}
 +        from  (
 +            select element {'l_shipdate':l.l_shipdate,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_suppkey':l.l_suppkey,'o_custkey':o.o_custkey}
 +            from  LineItem as l,
 +                  Orders as o
 +            where ((o.o_orderkey = l.l_orderkey) and (l.l_shipdate >= '1992-01-01') and (l.l_shipdate <= '1996-12-31'))
 +        ) as lo,
 +              Customer as c
 +        where (c.c_custkey = lo.o_custkey)
 +    ) as loc,
 +          Supplier as s
 +    where (s.s_suppkey = loc.l_suppkey)
 +) as locs,
 +      tpch.q7_volume_shipping_tmp() as t
 +with  l_year0 as tpch."get-year"(locs.l_shipdate)
 +where ((locs.c_nationkey = t.c_nationkey) and (locs.s_nationkey = t.s_nationkey))
 +group by t.supp_nation as supp_nation,t.cust_nation as cust_nation,l_year0 as l_year
- with  revenue as tpch.sum((
++with  revenue as COLL_SUM((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locs as i
 +  ))
 +order by supp_nation,cust_nation,l_year
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q08_national_market_share/q08_national_market_share.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q08_national_market_share/q08_national_market_share.3.query.sqlpp
index 77d3881,0000000..b5e49f7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q08_national_market_share/q08_national_market_share.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q08_national_market_share/q08_national_market_share.3.query.sqlpp
@@@ -1,69 -1,0 +1,69 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'year':year,'mkt_share':(tpch.sum((
++select element {'year':year,'mkt_share':(COLL_SUM((
 +          select element tpch."switch-case"((i.s_name = 'BRAZIL'),true,i.revenue,false,0.0)
 +          from  t as i
-       )) / tpch.sum((
++      )) / COLL_SUM((
 +          select element i.revenue
 +          from  t as i
 +      )))}
 +from  (
 +    select element {'year':o_year,'revenue':(slnrcop.l_extendedprice * (1 - slnrcop.l_discount)),'s_name':n2.n_name}
 +    from  (
 +        select element {'o_orderdate':lnrcop.o_orderdate,'l_discount':lnrcop.l_discount,'l_extendedprice':lnrcop.l_extendedprice,'l_suppkey':lnrcop.l_suppkey,'s_nationkey':s.s_nationkey}
 +        from  Supplier as s,
 +              (
 +            select element {'o_orderdate':lnrco.o_orderdate,'l_discount':lnrco.l_discount,'l_extendedprice':lnrco.l_extendedprice,'l_suppkey':lnrco.l_suppkey}
 +            from  (
 +                select element {'o_orderdate':nrco.o_orderdate,'l_partkey':l.l_partkey,'l_discount':l.l_discount,'l_extendedprice':l.l_extendedprice,'l_suppkey':l.l_suppkey}
 +                from  LineItem as l,
 +                      (
 +                    select element {'o_orderdate':o.o_orderdate,'o_orderkey':o.o_orderkey}
 +                    from  Orders as o,
 +                          (
 +                        select element {'c_custkey':c.c_custkey}
 +                        from  Customer as c,
 +                              (
 +                            select element {'n_nationkey':n1.n_nationkey}
 +                            from  Nation as n1,
 +                                  Region as r1
 +                            where ((n1.n_regionkey = r1.r_regionkey) and (r1.r_name = 'AMERICA'))
 +                        ) as nr
 +                        where (c.c_nationkey = nr.n_nationkey)
 +                    ) as nrc
 +                    where (nrc.c_custkey = o.o_custkey)
 +                ) as nrco
 +                where ((l.l_orderkey = nrco.o_orderkey) and (nrco.o_orderdate >= '1995-01-01') and (nrco.o_orderdate < '1996-12-31'))
 +            ) as lnrco,
 +                  Part as p
 +            where ((p.p_partkey = lnrco.l_partkey) and (p.p_type = 'ECONOMY ANODIZED STEEL'))
 +        ) as lnrcop
 +        where (s.s_suppkey = lnrcop.l_suppkey)
 +    ) as slnrcop,
 +          Nation as n2
 +    with  o_year as tpch."get-year"(slnrcop.o_orderdate)
 +    where (slnrcop.s_nationkey = n2.n_nationkey)
 +) as t
 +group by t.year as year
 +order by year
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
index b7e5e4b,0000000..a76e49e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.sum((
++select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.coll_sum((
 +        select element pr.amount
 +        from  profit as pr
 +    ))}
 +from  (
 +    select element {'nation':l3.n_name,'o_year':o_year,'amount':amount}
 +    from  Orders as o,
 +          (
 +        select element {'l_extendedprice':l2.l_extendedprice,'l_discount':l2.l_discount,'l_quantity':l2.l_quantity,'l_orderkey':l2.l_orderkey,'n_name':l2.n_name,'ps_supplycost':l2.ps_supplycost}
 +        from  Part as p join
 +              (
 +            select element {'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'l_quantity':l1.l_quantity,'l_partkey':l1.l_partkey,'l_orderkey':l1.l_orderkey,'n_name':l1.n_name,'ps_supplycost':ps.ps_supplycost}
 +            from  Partsupp as ps join
 +                  (
 +                select element {'l_suppkey':l.l_suppkey,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_quantity':l.l_quantity,'l_partkey':l.l_partkey,'l_orderkey':l.l_orderkey,'n_name':s1.n_name}
 +                from  (
 +                    select element {'s_suppkey':s.s_suppkey,'n_name':n.n_name}
 +                    from  Supplier as s,
 +                          Nation as n
 +                    where (n.n_nationkey = s.s_nationkey)
 +                ) as s1 join
 +                      LineItem as l
 +                on (s1.s_suppkey = l.l_suppkey)
 +            ) as l1
 +            on ((ps.ps_suppkey = l1.l_suppkey) and (ps.ps_partkey = l1.l_partkey))
 +        ) as l2
 +         on (tpch.contains(p.p_name,'green') and (p.p_partkey = l2.l_partkey))
 +    ) as l3
 +    with  amount as ((l3.l_extendedprice * (1 - l3.l_discount)) - (l3.ps_supplycost * l3.l_quantity)),
 +          o_year as tpch."get-year"(o.o_orderdate)
 +    where (o.o_orderkey = l3.l_orderkey)
 +) as profit
 +group by profit.nation as nation,profit.o_year as o_year
 +order by nation,o_year desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item/q10_returned_item.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item/q10_returned_item.3.query.sqlpp
index 50fb6c3,0000000..7278f81
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item/q10_returned_item.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item/q10_returned_item.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'c_custkey':c_custkey,'c_name':c_name,'revenue':revenue,'c_acctbal':c_acctbal,'n_name':n_name,'c_address':c_address,'c_phone':c_phone,'c_comment':c_comment}
 +from  (
 +    select element {'c_custkey':ocn.c_custkey,'c_name':ocn.c_name,'c_acctbal':ocn.c_acctbal,'n_name':ocn.n_name,'c_address':ocn.c_address,'c_phone':ocn.c_phone,'c_comment':ocn.c_comment,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount}
 +    from  LineItem as l,
 +          (
 +        select element {'c_custkey':c.c_custkey,'c_name':c.c_name,'c_acctbal':c.c_acctbal,'n_name':n.n_name,'c_address':c.c_address,'c_phone':c.c_phone,'c_comment':c.c_comment,'o_orderkey':o.o_orderkey}
 +        from  Orders as o,
 +              Customer as c,
 +              Nation as n
 +        where (((c.c_custkey = o.o_custkey) and (o.o_orderdate >= '1993-10-01') and (o.o_orderdate < '1994-01-01')) and (c.c_nationkey = n.n_nationkey))
 +    ) as ocn
 +    where ((l.l_orderkey = ocn.o_orderkey) and (l.l_returnflag = 'R'))
 +) as locn
 +group by locn.c_custkey as c_custkey,locn.c_name as c_name,locn.c_acctbal as c_acctbal,locn.c_phone as c_phone,locn.n_name as n_name,locn.c_address as c_address,locn.c_comment as c_comment
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locn as i
 +  ))
 +order by revenue desc
 +limit 20
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
index 50fb6c3,0000000..7278f81
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'c_custkey':c_custkey,'c_name':c_name,'revenue':revenue,'c_acctbal':c_acctbal,'n_name':n_name,'c_address':c_address,'c_phone':c_phone,'c_comment':c_comment}
 +from  (
 +    select element {'c_custkey':ocn.c_custkey,'c_name':ocn.c_name,'c_acctbal':ocn.c_acctbal,'n_name':ocn.n_name,'c_address':ocn.c_address,'c_phone':ocn.c_phone,'c_comment':ocn.c_comment,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount}
 +    from  LineItem as l,
 +          (
 +        select element {'c_custkey':c.c_custkey,'c_name':c.c_name,'c_acctbal':c.c_acctbal,'n_name':n.n_name,'c_address':c.c_address,'c_phone':c.c_phone,'c_comment':c.c_comment,'o_orderkey':o.o_orderkey}
 +        from  Orders as o,
 +              Customer as c,
 +              Nation as n
 +        where (((c.c_custkey = o.o_custkey) and (o.o_orderdate >= '1993-10-01') and (o.o_orderdate < '1994-01-01')) and (c.c_nationkey = n.n_nationkey))
 +    ) as ocn
 +    where ((l.l_orderkey = ocn.o_orderkey) and (l.l_returnflag = 'R'))
 +) as locn
 +group by locn.c_custkey as c_custkey,locn.c_name as c_name,locn.c_acctbal as c_acctbal,locn.c_phone as c_phone,locn.n_name as n_name,locn.c_address as c_address,locn.c_comment as c_comment
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locn as i
 +  ))
 +order by revenue desc
 +limit 20
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q11_important_stock/q11_important_stock.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q11_important_stock/q11_important_stock.3.query.sqlpp
index ea3bd5a,0000000..6956c4b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q11_important_stock/q11_important_stock.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q11_important_stock/q11_important_stock.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- with  sum as tpch.sum((
++with  sum as COLL_SUM((
 +      select element (ps.ps_supplycost * ps.ps_availqty)
 +      from  Partsupp as ps,
 +            (
 +          select element {'s_suppkey':s.s_suppkey}
 +          from  Supplier as s,
 +                Nation as n
 +          where (s.s_nationkey = n.n_nationkey)
 +      ) as sn
 +      where (ps.ps_suppkey = sn.s_suppkey)
 +  ))
 +select element {'partkey':t1.ps_partkey,'part_value':t1.part_value}
 +from  (
-     select element {'ps_partkey':ps_partkey,'part_value':tpch.sum((
++    select element {'ps_partkey':ps_partkey,'part_value':COLL_SUM((
 +            select element (i.ps_supplycost * i.ps_availqty)
 +            from  ps as i
 +        ))}
 +    from  Partsupp as ps,
 +          (
 +        select element {'s_suppkey':s.s_suppkey}
 +        from  Supplier as s,
 +              Nation as n
 +        where (s.s_nationkey = n.n_nationkey)
 +    ) as sn
 +    where (ps.ps_suppkey = sn.s_suppkey)
 +    group by ps.ps_partkey as ps_partkey
 +) as t1
 +where (t1.part_value > (sum * 0.00001))
 +order by t1.part_value desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q12_shipping/q12_shipping.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q12_shipping/q12_shipping.3.query.sqlpp
index 61b685e,0000000..233ec2f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q12_shipping/q12_shipping.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q12_shipping/q12_shipping.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'l_shipmode':l_shipmode,'high_line_count':tpch.sum((
++select element {'l_shipmode':l_shipmode,'high_line_count':COLL_SUM((
 +        select element tpch."switch-case"(((i.o_orderpriority = '1-URGENT') or (i.o_orderpriority = '2-HIGH')),true,1,false,0)
 +        from  o as i
-     )),'low_line_count':tpch.sum((
++    )),'low_line_count':COLL_SUM((
 +        select element tpch."switch-case"(((i.o_orderpriority = '1-URGENT') or (i.o_orderpriority = '2-HIGH')),true,0,false,1)
 +        from  o as i
 +    ))}
 +from  LineItem as l,
 +      Orders as o
 +where ((o.o_orderkey = l.l_orderkey) and (l.l_commitdate < l.l_receiptdate) and (l.l_shipdate < l.l_commitdate) and (l.l_receiptdate >= '1994-01-01') and (l.l_receiptdate < '1995-01-01') and ((l.l_shipmode = 'MAIL') or (l.l_shipmode = 'SHIP')))
 +group by l.l_shipmode as l_shipmode
 +order by l_shipmode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
index afa7c9b,0000000..bf01ff7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
@@@ -1,44 -1,0 +1,44 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
 +select element {'c_count':c_count,'custdist':custdist}
 +from  (
-     select element {'c_custkey':c_custkey,'c_count':tpch.sum((
++    select element {'c_custkey':c_custkey,'c_count':COLL_SUM((
 +            select element i.o_orderkey_count
 +            from  co as i
 +        ))}
 +    from  (
-         select element {'c_custkey':c.c_custkey,'o_orderkey_count':tpch.count((
++        select element {'c_custkey':c.c_custkey,'o_orderkey_count':coll_count((
 +                select element o.o_orderkey
 +                from  Orders as o
 +                where ((c.c_custkey = o.o_custkey) and tpch.not(tpch.like(o.o_comment,'%special%requests%')))
 +            ))}
 +        from  Customer as c
 +    ) as co
 +    group by co.c_custkey as c_custkey
 +) as gco
 +group by gco.c_count as c_count
 +with  custdist as tpch.count(gco)
 +order by custdist desc,c_count desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
index a8f29d3,0000000..3b15a4d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element (100.0 * tpch.sum((
-       select element tpch."switch-case"(tpch.like(i.p_type,'PROMO%'),true,(i.l_extendedprice * (1 - i.l_discount)),false,0.0)
++select element (100.0 * COLL_SUM((
++      select element tpch."switch-case"(LIKE(i.p_type,'PROMO%'),true,(i.l_extendedprice * (1 - i.l_discount)),false,0.0)
 +      from  lp as i
-   )) / tpch.sum((
++  )) / COLL_SUM((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  lp as i
 +  )))
 +from  LineItem as l,
 +      Part as p
 +let lp = {'p_type': p.p_type, 'l_extendedprice': l.l_extendedprice, 'l_discount': l.l_discount}
 +where ((l.l_partkey = p.p_partkey) and (l.l_shipdate >= '1995-09-01') and (l.l_shipdate < '1995-10-01'))
 +group by 1 as t
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q15_top_supplier/q15_top_supplier.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q15_top_supplier/q15_top_supplier.3.query.sqlpp
index e21104b,0000000..70b5d38
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q15_top_supplier/q15_top_supplier.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q15_top_supplier/q15_top_supplier.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function revenue() {
 +(
-     select element {'supplier_no':l_suppkey,'total_revenue':tpch.sum((
++    select element {'supplier_no':l_suppkey,'total_revenue':COLL_SUM((
 +            select element (i.l_extendedprice * (1 - i.l_discount))
 +            from  l as i
 +        ))}
 +    from  LineItem as l
 +    where ((l.l_shipdate >= '1996-01-01') and (l.l_shipdate < '1996-04-01'))
 +    group by l.l_suppkey as l_suppkey
 +)
 +};
- with  m as tpch.max((
++with  m as COLL_MAX((
 +      select element r2.total_revenue
 +      from  tpch.revenue() as r2
 +  ))
 +select element {'s_suppkey':s.s_suppkey,'s_name':s.s_name,'s_address':s.s_address,'s_phone':s.s_phone,'total_revenue':r.total_revenue}
 +from  Supplier as s,
 +      tpch.revenue() as r
 +where ((s.s_suppkey = r.supplier_no) and (r.total_revenue < (m + 0.000000001)) and (r.total_revenue > (m - 0.000000001)))
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
index cc4d01a,0000000..b9fa20e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
@@@ -1,49 -1,0 +1,49 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp() {
 +(
 +    select element {'p_brand':psp.p_brand,'p_type':psp.p_type,'p_size':psp.p_size,'ps_suppkey':psp.ps_suppkey}
 +    from  (
 +        select element {'p_brand':p.p_brand,'p_type':p.p_type,'p_size':p.p_size,'ps_suppkey':ps.ps_suppkey}
 +        from  Partsupp as ps,
 +              Part as p
-         where ((p.p_partkey = ps.ps_partkey) and (p.p_brand != 'Brand#45') and tpch.not(tpch.like(p.p_type,'MEDIUM POLISHED%')))
++        where ((p.p_partkey = ps.ps_partkey) and (p.p_brand != 'Brand#45') and NOT(tpch.like(p.p_type,'MEDIUM POLISHED%')))
 +    ) as psp,
 +          Supplier as s
-     where ((psp.ps_suppkey = s.s_suppkey) and tpch.not(tpch.like(s.s_comment,'%Customer%Complaints%')))
++    where ((psp.ps_suppkey = s.s_suppkey) and tpch.not(LIKE(s.s_comment,'%Customer%Complaints%')))
 +)
 +};
 +select element {'p_brand':p_brand,'p_type':p_type,'p_size':p_size,'supplier_cnt':supplier_cnt}
 +from  (
 +    select element {'p_brand':p_brand1,'p_type':p_type1,'p_size':p_size1,'ps_suppkey':ps_suppkey1}
 +    from  tpch.tmp() as t
 +    where ((t.p_size = 49) or (t.p_size = 14) or (t.p_size = 23) or (t.p_size = 45) or (t.p_size = 19) or (t.p_size = 3) or (t.p_size = 36) or (t.p_size = 9))
 +    group by t.p_brand as p_brand1,t.p_type as p_type1,t.p_size as p_size1,t.ps_suppkey as ps_suppkey1
 +) as t2
 +group by t2.p_brand as p_brand,t2.p_type as p_type,t2.p_size as p_size
- with  supplier_cnt as tpch.count((
++with  supplier_cnt as COLL_COUNT((
 +      select element i.ps_suppkey
 +      from  t2 as i
 +  ))
 +order by supplier_cnt desc,p_brand,p_type,p_size
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
index ada4f75,0000000..d39c75a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'t_partkey':l_partkey,'t_count':tpch.count(l),'t_avg_quantity':(0.2 * tpch.avg((
++select element {'t_partkey':l_partkey,'t_count':COLL_COUNT(l),'t_avg_quantity':(0.2 * COLL_AVG((
 +          select element i.l_quantity
 +          from  l as i
-       ))),'t_max_suppkey':tpch.max((
++      ))),'t_max_suppkey':COLL_MAX((
 +        select element i.l_suppkey
 +        from  l as i
-     )),'t_max_linenumber':tpch.max((
++    )),'t_max_linenumber':COLL_MAX((
 +        select element i.l_linenumber
 +        from  l as i
-     )),'t_avg_extendedprice':tpch.avg((
++    )),'t_avg_extendedprice':COLL_AVG((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'t_avg_discount':tpch.avg((
++    )),'t_avg_discount':COLL_AVG((
 +        select element i.l_discount
 +        from  l as i
-     )),'t_avg_tax':tpch.avg((
++    )),'t_avg_tax':COLL_AVG((
 +        select element i.l_tax
 +        from  l as i
-     )),'t_max_shipdate':tpch.max((
++    )),'t_max_shipdate':COLL_MAX((
 +        select element i.l_shipdate
 +        from  l as i
-     )),'t_min_commitdate':tpch.min((
++    )),'t_min_commitdate':COLL_MIN((
 +        select element i.l_commitdate
 +        from  l as i
-     )),'t_min_receiptdate':tpch.min((
++    )),'t_min_receiptdate':COLL_MIN((
 +        select element i.l_receiptdate
 +        from  l as i
-     )),'t_max_comment':tpch.max((
++    )),'t_max_comment':COLL_MAX((
 +        select element i.l_comment
 +        from  l as i
 +    ))}
 +from  LineItem as l
 +group by l.l_partkey as l_partkey
 +order by l_partkey
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
index 79c397c,0000000..ddbbecc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp() {
 +(
-     select element {'t_partkey':l_partkey,'t_avg_quantity':(0.2 * tpch.avg((
++    select element {'t_partkey':l_partkey,'t_avg_quantity':(0.2 * tpch.coll_avg((
 +              select element i.l_quantity
 +              from  l as i
 +          )))}
 +    from  LineItem as l
 +    group by l.l_partkey as l_partkey
 +)
 +};
 +
- select element (tpch.sum((
++select element (tpch.coll_sum((
 +      select element l.l_extendedprice
 +      from  tpch.tmp() as t,
 +            LineItem as l,
 +            Part as p
 +      where p.p_partkey = l.l_partkey and p.p_container = 'MED BOX' and l.l_partkey = t.t_partkey and l.l_quantity < t.t_avg_quantity
 +  )) / 7.0);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
index 89ff8f8,0000000..e0976a0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':tpch.sum((
++select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':tpch.coll_sum((
 +        select element j.l_quantity
 +        from  l as j
 +    ))}
 +from  Customer as c,
 +      Orders as o,
 +      (
-     select element {'l_orderkey':l_orderkey,'t_sum_quantity':tpch.sum((
++    select element {'l_orderkey':l_orderkey,'t_sum_quantity':tpch.coll_sum((
 +            select element i.l_quantity
 +            from  l as i
 +        ))}
 +    from  LineItem as l
 +    group by l.l_orderkey as l_orderkey
 +) as t,
 +      LineItem as l
 +where ((c.c_custkey = o.o_custkey) and (o.o_orderkey = t.l_orderkey) and (t.t_sum_quantity > 30) and (l.l_orderkey = t.l_orderkey))
 +group by c.c_name as c_name,c.c_custkey as c_custkey,o.o_orderkey as o_orderkey,o.o_orderdate as o_orderdate,o.o_totalprice as o_totalprice
 +order by o_totalprice desc,o_orderdate
 +limit 100
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
index 7065f87,0000000..2a43931
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
- select element tpch.sum((
++select element COLL_SUM((
 +    select element (l.l_extendedprice * (1 - l.l_discount))
 +    from  LineItem as l,
 +          Part as p
 +    where ((p.p_partkey = l.l_partkey) and (((p.p_brand = 'Brand#12') and tpch."reg-exp"(p.p_container,'SM CASE||SM BOX||SM PACK||SM PKG') and (l.l_quantity >= 1) and (l.l_quantity <= 11) and (p.p_size >= 1) and (p.p_size <= 5) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON')) or ((p.p_brand = 'Brand#23') and tpch."reg-exp"(p.p_container,'MED BAG||MED BOX||MED PKG||MED PACK') and (l.l_quantity >= 10) and (l.l_quantity <= 20) and (p.p_size >= 1) and (p.p_size <= 10) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON')) or ((p.p_brand = 'Brand#34') and tpch."reg-exp"(p.p_container,'LG CASE||LG BOX||LG PACK||LG PKG') and (l.l_quantity >= 20) and (l.l_quantity <= 30) and (p.p_size >= 1) and (p.p_size <= 15) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON'))))
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
index 0657ad3,0000000..2bc3b25
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
@@@ -1,53 -1,0 +1,53 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'s_name':t4.s_name,'s_address':t4.s_address}
 +from  (
 +    select distinct element {'ps_suppkey':pst1.ps_suppkey}
 +    from  (
-         select element {'l_partkey':l_partkey,'l_suppkey':l_suppkey,'sum_quantity':(0.5 * tpch.sum((
++        select element {'l_partkey':l_partkey,'l_suppkey':l_suppkey,'sum_quantity':(0.5 * COLL_SUM((
 +                  select element i.l_quantity
 +                  from  l as i
 +              )))}
 +        from  LineItem as l
 +        group by l.l_partkey as l_partkey,l.l_suppkey as l_suppkey
 +    ) as t2,
 +          (
 +        select element {'ps_suppkey':ps.ps_suppkey,'ps_partkey':ps.ps_partkey,'ps_availqty':ps.ps_availqty}
 +        from  Partsupp as ps,
 +              (
 +            select distinct element {'p_partkey':p.p_partkey}
 +            from  Part as p
 +        ) as t1
 +        where (ps.ps_partkey = t1.p_partkey)
 +    ) as pst1
 +    where ((pst1.ps_partkey = t2.l_partkey) and (pst1.ps_suppkey = t2.l_suppkey) and (pst1.ps_availqty > t2.sum_quantity))
 +) as t3,
 +      (
 +    select element {'s_name':s.s_name,'s_address':s.s_address,'s_suppkey':s.s_suppkey}
 +    from  Nation as n,
 +          Supplier as s
 +    where (s.s_nationkey = n.n_nationkey)
 +) as t4
 +where (t3.ps_suppkey = t4.s_suppkey)
 +order by t4.s_name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
index f91068d,0000000..3ccb9b8
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
@@@ -1,80 -1,0 +1,80 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp1() {
 +(
-     select element {'l_orderkey':l_orderkey,'count_suppkey':tpch.count((
++    select element {'l_orderkey':l_orderkey,'count_suppkey':coll_count((
 +            select element i.l_suppkey
 +            from  l2 as i
-         )),'max_suppkey':tpch.max((
++        )),'max_suppkey':tpch.coll_max((
 +            select element i.l_suppkey
 +            from  l2 as i
 +        ))}
 +    from  (
 +        select element {'l_orderkey':l_orderkey1,'l_suppkey':l_suppkey1}
 +        from  LineItem as l
 +        group by l.l_orderkey as l_orderkey1,l.l_suppkey as l_suppkey1
 +    ) as l2
 +    group by l2.l_orderkey as l_orderkey
 +)
 +};
 +declare function tmp2() {
 +(
-     select element {'l_orderkey':l_orderkey,'count_suppkey':tpch.count((
++    select element {'l_orderkey':l_orderkey,'count_suppkey':coll_count((
 +            select element i.l_suppkey
 +            from  l2 as i
-         )),'max_suppkey':tpch.max((
++        )),'max_suppkey':tpch.coll_max((
 +            select element i.l_suppkey
 +            from  l2 as i
 +        ))}
 +    from  (
 +        select element {'l_orderkey':l_orderkey1,'l_suppkey':l_suppkey1}
 +        from  LineItem as l
 +        where (l.l_receiptdate > l.l_commitdate)
 +        group by l.l_orderkey as l_orderkey1,l.l_suppkey as l_suppkey1
 +    ) as l2
 +    group by l2.l_orderkey as l_orderkey
 +)
 +};
 +select element {'s_name':s_name,'numwait':numwait}
 +from  (
 +    select element {'s_name':t3.s_name,'l_suppkey':t3.l_suppkey,'l_orderkey':t2.l_orderkey,'count_suppkey':t2.count_suppkey,'max_suppkey':t2.max_suppkey}
 +    from  (
 +            select element {'s_name':ns.s_name,'l_orderkey':t1.l_orderkey,'l_suppkey':l.l_suppkey}
 +            from  LineItem as l,
 +                  (
 +                        select element {'s_name':s.s_name,'s_suppkey':s.s_suppkey}
 +                        from  Nation as n,
 +                        Supplier as s
 +                        where (s.s_nationkey = n.n_nationkey)
 +                   ) as ns,
 +                   Orders as o,
 +                   tpch.tmp1() as t1
 +            where ns.s_suppkey = l.l_suppkey and l.l_receiptdate > l.l_commitdate and o.o_orderkey = l.l_orderkey and l.l_orderkey = t1.l_orderkey
 +    ) as t3,
 +      tpch.tmp2() as t2
 +    where ((t2.count_suppkey >= 0) and (t3.l_orderkey = t2.l_orderkey))
 +) as t4
 +group by t4.s_name as s_name
 +with  numwait as tpch.count(t4)
 +order by numwait desc,s_name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
index 6136008,0000000..274adbc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function q22_customer_tmp() {
 +(
-     select element {'c_acctbal':c.c_acctbal,'c_custkey':c.c_custkey,'cntrycode':tpch.substring(c.c_phone,1,2)}
++    select element {'c_acctbal':c.c_acctbal,'c_custkey':c.c_custkey,'cntrycode':SUBSTR(c.c_phone,1,2)}
 +    from  Customer as c
 +)
 +};
- with  avg as tpch.avg((
++with  avg as tpch.coll_avg((
 +      select element c.c_acctbal
 +      from  Customer as c
 +      where (c.c_acctbal > 0.0)
 +  ))
- select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.sum((
++select element {'cntrycode':cntrycode,'numcust':COLL_COUNT(ct),'totacctbal':COLL_SUM((
 +        select element i.c_acctbal
 +        from  ct as i
 +    ))}
 +from  tpch.q22_customer_tmp() as ct
 +where (ct.c_acctbal > avg)
 +group by ct.cntrycode as cntrycode
 +order by cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue562/query-issue562.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue562/query-issue562.3.query.sqlpp
index ac54a30,0000000..eaac9ce
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue562/query-issue562.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue562/query-issue562.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue562
 + * https://code.google.com/p/asterixdb/issues/detail?id=562
 + * Expected Res : SUCCESS
 + * Date         : 15th Jan. 2015
 + */
 +
 +use tpch;
 +
 +
 +declare function q22_customer_tmp() {
 +(
 +    select element {'c_acctbal':c.c_acctbal,'c_custkey':c.c_custkey,'cntrycode':phone_substr}
 +    from  Customer as c
 +    with  phone_substr as tpch.substring(c.c_phone,1,2)
 +    where ((phone_substr = '13') or (phone_substr = '31') or (phone_substr = '23') or (phone_substr = '29') or (phone_substr = '30') or (phone_substr = '18') or (phone_substr = '17'))
 +)
 +};
- with  avg as tpch.avg((
++with  avg as tpch.coll_avg((
 +      select element c.c_acctbal
 +      from  Customer as c
 +      with  phone_substr as tpch.substring(c.c_phone,1,2)
 +      where ((c.c_acctbal > 0.0) and ((phone_substr = '13') or (phone_substr = '31') or (phone_substr = '23') or (phone_substr = '29') or (phone_substr = '30') or (phone_substr = '18') or (phone_substr = '17')))
 +  ))
- select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.sum((
++select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.coll_sum((
 +        select element i.c_acctbal
 +        from  ct as i
 +    ))}
 +from  tpch.q22_customer_tmp() as ct
- where (tpch.count((
++where (coll_count((
 +    select element o
 +    from  Orders as o
 +    where (ct.c_custkey = o.o_custkey)
 +)) = 0)
 +group by ct.cntrycode as cntrycode
 +order by cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue638/query-issue638.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue638/query-issue638.3.query.sqlpp
index 6c14062,0000000..3a42722
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue638/query-issue638.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue638/query-issue638.3.query.sqlpp
@@@ -1,63 -1,0 +1,63 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue638
 + * https://code.google.com/p/asterixdb/issues/detail?id=638
 + * Expected Res : SUCCESS
 + * Date         : 24th Oct. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.sum((
++select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.coll_sum((
 +        select element pr.amount
 +        from  profit as pr
 +    ))}
 +from  (
 +    select element {'nation':l3.n_name,'o_year':o_year,'amount':amount}
 +    from  Orders as o,
 +          (
 +        select element {'l_extendedprice':l2.l_extendedprice,'l_discount':l2.l_discount,'l_quantity':l2.l_quantity,'l_orderkey':l2.l_orderkey,'n_name':l2.n_name,'ps_supplycost':l2.ps_supplycost}
 +        from  Part as p,
 +              (
 +            select element {'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'l_quantity':l1.l_quantity,'l_partkey':l1.l_partkey,'l_orderkey':l1.l_orderkey,'n_name':l1.n_name,'ps_supplycost':ps.ps_supplycost}
 +            from  Partsupp as ps,
 +                  (
 +                select element {'l_suppkey':l.l_suppkey,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_quantity':l.l_quantity,'l_partkey':l.l_partkey,'l_orderkey':l.l_orderkey,'n_name':s1.n_name}
 +                from  (
 +                    select element {'s_suppkey':s.s_suppkey,'n_name':n.n_name}
 +                    from  Supplier as s,
 +                          Nation as n
 +                    where (n.n_nationkey = s.s_nationkey)
 +                ) as s1,
 +                      LineItem as l
 +                where (s1.s_suppkey = l.l_suppkey)
 +            ) as l1
 +            where ((ps.ps_suppkey = l1.l_suppkey) and (ps.ps_partkey = l1.l_partkey))
 +        ) as l2
 +        where (tpch.contains(p.p_name,'green') and (p.p_partkey = l2.l_partkey))
 +    ) as l3
 +    with  amount as ((l3.l_extendedprice * (1 - l3.l_discount)) - (l3.ps_supplycost * l3.l_quantity)),
 +          o_year as tpch."get-year"(o.o_orderdate)
 +    where (o.o_orderkey = l3.l_orderkey)
 +) as profit
 +group by profit.nation as nation,profit.o_year as o_year
 +order by nation,o_year desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785-2/query-issue785-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785-2/query-issue785-2.3.query.sqlpp
index 2cad6ea,0000000..de434ef
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785-2/query-issue785-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785-2/query-issue785-2.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue785
 + * https://code.google.com/p/asterixdb/issues/detail?id=785
 + * Expected Res : SUCCESS
 + * Date         : 2nd Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +with  t as (
 +      select element {'n_nationkey':nation.n_nationkey,'n_name':nation.n_name}
 +      from  Nation as nation,
 +            SelectedNation as sn
 +      where (nation.n_nationkey = sn.n_nationkey)
 +  ),
 +      X as (
 +      select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':sum}
 +      from  t as n,
 +            Customer as customer,
 +            Orders as orders
 +      where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = n.n_nationkey))
 +      group by orders.o_orderdate as orderdate,n.n_nationkey as nation_key
-       with  sum as tpch.sum((
++      with  sum as tpch.coll_sum((
 +            select element o.o_totalprice
 +            from  orders as o
 +        ))
 +  )
 +select element {'nation_key':nation_key,'sum_price':(
 +        select element {'orderdate':y.order_date,'sum_price':y.sum_price}
 +        from  x as y
 +        order by y.sum_price desc
 +        limit 3
 +    )}
 +from  X as x
 +group by x.nation_key as nation_key
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785/query-issue785.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785/query-issue785.3.query.sqlpp
index ed649ca,0000000..c7761f3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785/query-issue785.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue785/query-issue785.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue785
 + * https://code.google.com/p/asterixdb/issues/detail?id=785
 + * Expected Res : SUCCESS
 + * Date         : 2nd Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation_key,'sum_price':(
 +        select element {'orderdate':od,'sum_price':sum}
 +        from  x as i
 +        group by i.order_date as od
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element s.sum_price
 +              from  i as s
 +          ))
 +        order by sum desc
 +        limit 3
 +    )}
 +from  (
-     select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':tpch.sum((
++    select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':tpch.coll_sum((
 +            select element o.o_totalprice
 +            from  orders as o
 +        ))}
 +    from  Nation as n,
 +          Customer as customer,
 +          Orders as orders
 +    where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = n.n_nationkey))
 +    group by orders.o_orderdate as orderdate,n.n_nationkey as nation_key
 +) as x
 +group by x.nation_key as nation_key
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue786/query-issue786.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue786/query-issue786.3.query.sqlpp
index dead643,0000000..37e3c92
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue786/query-issue786.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue786/query-issue786.3.query.sqlpp
@@@ -1,45 -1,0 +1,45 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue786
 + * https://code.google.com/p/asterixdb/issues/detail?id=786
 + * Expected Res : SUCCESS
 + * Date         : 10th Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element {'order_date':orderdate,'sum_price':sum}
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum desc
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey = sn.sn_nationkey)
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-2/query-issue810-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-2/query-issue810-2.3.query.sqlpp
index 16d3fe0,0000000..d75ea7b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-2/query-issue810-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-2/query-issue810-2.3.query.sqlpp
@@@ -1,44 -1,0 +1,44 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheaps),'total_charges':tpch.sum(charges)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':coll_count(cheaps),'total_charges':tpch.coll_sum(charges)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  cheaps as (
 +      select element m
 +      from  l as m
 +      where (m.l_discount > 0.05)
 +  ),
 +      charges as (
 +      select element (a.l_extendedprice * (1 - a.l_discount) * (1 + a.l_tax))
 +      from  l as a
 +  )
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-3/query-issue810-3.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-3/query-issue810-3.3.query.sqlpp
index 576192d,0000000..f2656ee
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-3/query-issue810-3.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810-3/query-issue810-3.3.query.sqlpp
@@@ -1,53 -1,0 +1,53 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheaps),'avg_expensive_discounts':tpch.avg(expensives),'sum_disc_prices':tpch.sum(disc_prices),'total_charges':tpch.sum(charges)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':coll_count(cheaps),'avg_expensive_discounts':tpch.coll_avg(expensives),'sum_disc_prices':tpch.coll_sum(disc_prices),'total_charges':tpch.coll_sum(charges)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  expensives as (
 +      select element i.l_discount
 +      from  l as i
 +      where (i.l_discount <= 0.05)
 +  ),
 +      cheaps as (
 +      select element i
 +      from  l as i
 +      where (i.l_discount > 0.05)
 +  ),
 +      charges as (
 +      select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +      from  l as i
 +  ),
 +      disc_prices as (
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  l as i
 +  )
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810/query-issue810.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810/query-issue810.3.query.sqlpp
index 01e1654,0000000..f4638f2
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810/query-issue810.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue810/query-issue810.3.query.sqlpp
@@@ -1,45 -1,0 +1,45 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheap),'count_expensives':tpch.count(expensive)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':coll_count(cheap),'count_expensives':coll_count(expensive)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  cheap as (
 +      select element m
 +      from  l as m
 +      where (m.l_discount > 0.05)
 +  ),
 +      expensive as (
 +      select element a
 +      from  l as a
 +      where (a.l_discount <= 0.05)
 +  )
 +order by l_returnflag,l_linestatus
 +;



[36/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
Merge branch 'master' into hyracks-merge2


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/8516517e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/8516517e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/8516517e

Branch: refs/heads/master
Commit: 8516517e1f678407ef8a6bea56c85314ed43c100
Parents: e1c203e 877407a
Author: Ian Maxon <im...@apache.org>
Authored: Wed Apr 6 18:56:39 2016 -0700
Committer: Ian Maxon <im...@apache.org>
Committed: Wed Apr 6 18:56:39 2016 -0700

----------------------------------------------------------------------
 .../classad-parser-new.1.ddl.aql                |  31 ++
 .../classad-parser-new.2.lib.aql                |  19 +
 .../classad-parser-new.3.ddl.aql                |  27 +
 .../classad-parser-new.4.query.aql              |  23 +
 .../classad-parser-new.5.lib.aql                |  19 +
 .../classad-parser-old.1.ddl.aql                |  31 ++
 .../classad-parser-old.2.lib.aql                |  19 +
 .../classad-parser-old.3.ddl.aql                |  25 +
 .../classad-parser-old.4.query.aql              |  23 +
 .../classad-parser-old.5.lib.aql                |  19 +
 .../invalid-format/invalid-format.1.ddl.aql     |  34 ++
 .../invalid-format/invalid-format.2.query.aql   |  27 +
 .../feeds/twitter-feed/twitter-feed.1.ddl.aql   |  54 ++
 .../twitter-feed/twitter-feed.2.update.aql      |  25 +
 .../file-not-found/file-not-found.1.ddl.aql     |  29 +
 .../file-not-found/file-not-found.2.update.aql  |  30 +
 .../temp_primary_plus_ngram_flush.1.ddl.aql     |  67 +++
 .../temp_primary_plus_ngram_flush.2.update.aql  |  46 ++
 .../temp_primary_plus_ngram_flush.3.query.aql   |  29 +
 .../global-aggregate/q01/q01.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q01/q01.2.update.sqlpp     |  30 +
 .../global-aggregate/q01/q01.3.query.sqlpp      |  23 +
 .../global-aggregate/q02/q02.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q02/q02.2.update.sqlpp     |  30 +
 .../global-aggregate/q02/q02.3.query.sqlpp      |  23 +
 .../global-aggregate/q03/q03.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q03/q03.2.update.sqlpp     |  30 +
 .../global-aggregate/q03/q03.3.query.sqlpp      |  23 +
 .../global-aggregate/q04/q04.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q04/q04.2.update.sqlpp     |  30 +
 .../global-aggregate/q04/q04.3.query.sqlpp      |  23 +
 .../q05_error/q05_error.1.ddl.sqlpp             |  51 ++
 .../q05_error/q05_error.2.update.sqlpp          |  30 +
 .../q05_error/q05_error.3.query.sqlpp           |  23 +
 .../q06_error/q06_error.1.ddl.sqlpp             |  51 ++
 .../q06_error/q06_error.2.update.sqlpp          |  30 +
 .../q06_error/q06_error.3.query.sqlpp           |  23 +
 .../q07_error/q07_error.1.ddl.sqlpp             |  51 ++
 .../q07_error/q07_error.2.update.sqlpp          |  30 +
 .../q07_error/q07_error.3.query.sqlpp           |  26 +
 .../global-aggregate/q08/q08.1.ddl.sqlpp        |  51 ++
 .../global-aggregate/q08/q08.2.update.sqlpp     |  30 +
 .../global-aggregate/q08/q08.3.query.sqlpp      |  24 +
 .../q06_forecast_revenue_change.4.query.sqlpp   |  27 +
 .../classad-parser-new/classad-parser-new.1.adm | 100 ++++
 .../classad-parser-old/classad-parser-old.1.adm |   5 +
 .../results/global-aggregate/q01/q01.1.adm      |   1 +
 .../results/global-aggregate/q02/q02.1.adm      |   1 +
 .../results/global-aggregate/q08/q08.1.adm      |  10 +
 .../temp_primary_plus_ngram_flush.3.adm         |   1 +
 .../q06_forecast_revenue_change.2.adm           |   1 +
 .../q06_forecast_revenue_change.4.ast           |  73 +++
 .../api/IStreamNotificationHandler.java         |  28 +
 .../reader/IndexingStreamRecordReader.java      | 101 ++++
 .../reader/stream/StreamRecordReader.java       |  98 ++++
 .../stream/StreamRecordReaderFactory.java       |  72 +++
 .../provider/StreamRecordReaderProvider.java    |  81 +++
 .../resources/classad-with-temporals.classads   | 134 +++++
 .../results/classad-with-temporals.adm          |   1 +
 .../dataset-with-meta-record.1.script.aql       |   1 +
 .../dataset-with-meta-record.2.ddl.aql          |  58 ++
 .../dataset-with-meta-record.3.update.aql       |  27 +
 .../dataset-with-meta-record.4.script.aql       |   1 +
 .../dataset-with-meta-record.5.query.aql        |  29 +
 .../dataset-with-meta-record.6.script.aql       |   1 +
 .../dataset-with-meta-record.5.adm              |   1 +
 .../create_and_start.sh                         |   1 +
 .../dataset-with-meta-record/stop_and_delete.sh |   3 +
 .../dataset-with-meta-record/stop_and_start.sh  |   2 +
 .../visitor/InlineColumnAliasVisitor.java       | 450 +++++++++++++++
 .../SqlppBuiltinFunctionRewriteVisitor.java     |  46 ++
 .../SqlppGlobalAggregationSugarVisitor.java     |  67 +++
 .../visitor/SqlppGroupBySugarVisitor.java       | 123 +++++
 .../rewrites/visitor/SqlppGroupByVisitor.java   | 168 ++++++
 .../visitor/SqlppInlineUdfsVisitor.java         | 237 ++++++++
 .../visitor/VariableCheckAndRewriteVisitor.java | 102 ++++
 .../lang/sqlpp/util/FunctionMapUtil.java        | 150 +++++
 .../visitor/CheckSql92AggregateVisitor.java     | 265 +++++++++
 .../lang/sqlpp/visitor/DeepCopyVisitor.java     | 415 ++++++++++++++
 .../lang/sqlpp/visitor/FreeVariableVisitor.java | 471 ++++++++++++++++
 .../AbstractSqlppExpressionScopingVisitor.java  | 284 ++++++++++
 .../AbstractSqlppSimpleExpressionVisitor.java   | 347 ++++++++++++
 .../LangExpressionToPlanTranslator.java         | 112 ++--
 .../common/AsterixHyracksIntegrationUtil.java   |  25 +-
 .../asterix/app/external/FeedOperations.java    |  34 +-
 .../apache/asterix/file/DatasetOperations.java  |  10 +-
 .../asterix/file/DataverseOperations.java       |   2 +-
 .../asterix/messaging/NCMessageBroker.java      |  19 +-
 .../asterix/test/runtime/ExecutionTest.java     |   9 +-
 .../asterix/test/runtime/ExecutionTestUtil.java |   8 +-
 .../test/runtime/SqlppExecutionTest.java        |   8 +-
 .../asterix/test/sqlpp/ParserTestExecutor.java  |   1 +
 .../queries_sqlpp/count-tweets.sqlpp            |   2 +-
 .../queries_sqlpp/denorm-cust-order.sqlpp       |   4 +-
 .../queries_sqlpp/distinct_aggregate.sqlpp      |   4 +-
 .../optimizerts/queries_sqlpp/fj-phase1.sqlpp   |   2 +-
 .../queries_sqlpp/fj-phase2-with-hints.sqlpp    |   2 +-
 .../inlined_q18_large_volume_customer.sqlpp     |   4 +-
 .../queries_sqlpp/nest_aggregate.sqlpp          |   2 +-
 .../queries_sqlpp/orders-aggreg.sqlpp           |   6 +-
 .../q01_pricing_summary_report_nt.sqlpp         |  16 +-
 .../queries_sqlpp/q03_shipping_priority.sqlpp   |   2 +-
 .../q05_local_supplier_volume.sqlpp             |   2 +-
 .../optimizerts/queries_sqlpp/q2.sqlpp          |   4 +-
 .../queries_sqlpp/query-issue562.sqlpp          |   6 +-
 .../queries_sqlpp/query-issue601.sqlpp          |   2 +-
 .../queries_sqlpp/query-issue697.sqlpp          |   2 +-
 .../queries_sqlpp/query-issue785.sqlpp          |   2 +-
 .../queries_sqlpp/query-issue810-2.sqlpp        |   2 +-
 .../queries_sqlpp/query-issue810.sqlpp          |   2 +-
 .../queries_sqlpp/query-issue827-2.sqlpp        |  16 +-
 .../split-materialization-above-join.sqlpp      |   4 +-
 .../queries_sqlpp/split-materialization.sqlpp   |   4 +-
 .../rc-format/rc-format.1.ddl.aql               |   7 +-
 .../classad-parser/classad-parser.1.ddl.aql     |  31 --
 .../classad-parser/classad-parser.2.lib.aql     |  19 -
 .../classad-parser/classad-parser.3.ddl.aql     |  27 -
 .../classad-parser/classad-parser.4.query.aql   |  23 -
 .../classad-parser/classad-parser.5.lib.aql     |  19 -
 .../classad-parser2/classad-parser2.1.ddl.aql   |  31 --
 .../classad-parser2/classad-parser2.2.lib.aql   |  19 -
 .../classad-parser2/classad-parser2.3.ddl.aql   |  25 -
 .../classad-parser2/classad-parser2.4.query.aql |  23 -
 .../classad-parser2/classad-parser2.5.lib.aql   |  19 -
 .../hdfs/large-record/large-record.1.ddl.aql    |   2 +-
 .../agg_null/agg_null.3.query.sqlpp             |   2 +-
 .../agg_null_rec/agg_null_rec.3.query.sqlpp     |  10 +-
 .../agg_null_rec_1/agg_null_rec_1.3.query.sqlpp |  10 +-
 .../agg_number/agg_number.3.query.sqlpp         |   2 +-
 .../agg_number_rec/agg_number_rec.3.query.sqlpp |  10 +-
 .../avg_double/avg_double.3.query.sqlpp         |   2 +-
 .../avg_double_null.3.query.sqlpp               |   2 +-
 .../avg_empty_01/avg_empty_01.1.ddl.sqlpp       |   2 +-
 .../avg_empty_01/avg_empty_01.2.update.sqlpp    |   2 +-
 .../avg_empty_01/avg_empty_01.3.query.sqlpp     |   4 +-
 .../avg_empty_02/avg_empty_02.1.ddl.sqlpp       |   2 +-
 .../avg_empty_02/avg_empty_02.2.update.sqlpp    |   2 +-
 .../avg_empty_02/avg_empty_02.3.query.sqlpp     |   4 +-
 .../avg_float/avg_float.3.query.sqlpp           |   2 +-
 .../avg_float_null/avg_float_nu.3.query.sqlpp   |   2 +-
 .../avg_int16/avg_int16.3.query.sqlpp           |   2 +-
 .../avg_int16_null/avg_int16_null.3.query.sqlpp |   2 +-
 .../avg_int32/avg_int32.3.query.sqlpp           |   2 +-
 .../avg_int32_null/avg_int32_null.3.query.sqlpp |   2 +-
 .../avg_int64/avg_int64.3.query.sqlpp           |   2 +-
 .../avg_int64_null/avg_int64_null.3.query.sqlpp |   2 +-
 .../avg_int8/avg_int8.3.query.sqlpp             |   2 +-
 .../avg_int8_null/avg_int8_null.3.query.sqlpp   |   2 +-
 .../avg_mixed/avg_mixed.1.ddl.sqlpp             |   2 +-
 .../avg_mixed/avg_mixed.3.query.sqlpp           |   4 +-
 .../count_01/count_01.3.query.sqlpp             |   2 +-
 .../count_empty_01/count_empty_01.1.ddl.sqlpp   |   2 +-
 .../count_empty_01.2.update.sqlpp               |   2 +-
 .../count_empty_01/count_empty_01.3.query.sqlpp |   4 +-
 .../count_empty_02/count_empty_02.1.ddl.sqlpp   |   2 +-
 .../count_empty_02.2.update.sqlpp               |   2 +-
 .../count_empty_02/count_empty_02.3.query.sqlpp |   4 +-
 .../count_null/count_null.3.query.sqlpp         |   2 +-
 .../issue395/issue395.3.query.sqlpp             |   2 +-
 .../issue412_0/issue412_0.3.query.sqlpp         |   2 +-
 .../issue412_1/issue412_1.3.query.sqlpp         |   2 +-
 .../issue425_min_hetero_list.3.query.sqlpp      |   2 +-
 .../issue425_min_hetero_list_1.3.query.sqlpp    |   2 +-
 .../issue425_sum_hetero_list.3.query.sqlpp      |   2 +-
 .../issue425_sum_hetero_list_1.3.query.sqlpp    |   2 +-
 .../issue531_string_min_max.1.ddl.sqlpp         |   4 +-
 .../issue531_string_min_max.2.update.sqlpp      |   4 +-
 .../issue531_string_min_max.3.query.sqlpp       |   8 +-
 .../max_empty_01/max_empty_01.1.ddl.sqlpp       |   2 +-
 .../max_empty_01/max_empty_01.2.update.sqlpp    |   2 +-
 .../max_empty_01/max_empty_01.3.query.sqlpp     |   4 +-
 .../max_empty_02/max_empty_02.1.ddl.sqlpp       |   2 +-
 .../max_empty_02/max_empty_02.2.update.sqlpp    |   2 +-
 .../max_empty_02/max_empty_02.3.query.sqlpp     |   4 +-
 .../min_empty_01/min_empty_01.1.ddl.sqlpp       |   2 +-
 .../min_empty_01/min_empty_01.2.update.sqlpp    |   2 +-
 .../min_empty_01/min_empty_01.3.query.sqlpp     |   4 +-
 .../min_empty_02/min_empty_02.1.ddl.sqlpp       |   2 +-
 .../min_empty_02/min_empty_02.2.update.sqlpp    |   2 +-
 .../min_empty_02/min_empty_02.3.query.sqlpp     |   4 +-
 .../min_mixed/min_mixed.1.ddl.sqlpp             |   2 +-
 .../min_mixed/min_mixed.3.query.sqlpp           |   4 +-
 .../query-issue400/query-issue400.3.query.sqlpp |   2 +-
 .../scalar_avg/scalar_avg.1.ddl.sqlpp           |   2 +-
 .../scalar_avg/scalar_avg.2.update.sqlpp        |   2 +-
 .../scalar_avg/scalar_avg.3.query.sqlpp         |  14 +-
 .../scalar_avg_empty.1.ddl.sqlpp                |   2 +-
 .../scalar_avg_empty.2.update.sqlpp             |   2 +-
 .../scalar_avg_empty.3.query.sqlpp              |   4 +-
 .../scalar_avg_null/scalar_avg_null.1.ddl.sqlpp |   2 +-
 .../scalar_avg_null.2.update.sqlpp              |   2 +-
 .../scalar_avg_null.3.query.sqlpp               |  14 +-
 .../scalar_count/scalar_count.1.ddl.sqlpp       |   2 +-
 .../scalar_count/scalar_count.2.update.sqlpp    |   2 +-
 .../scalar_count/scalar_count.3.query.sqlpp     |  16 +-
 .../scalar_count_empty.1.ddl.sqlpp              |   2 +-
 .../scalar_count_empty.2.update.sqlpp           |   2 +-
 .../scalar_count_empty.3.query.sqlpp            |   4 +-
 .../scalar_count_null.1.ddl.sqlpp               |   2 +-
 .../scalar_count_null.2.update.sqlpp            |   2 +-
 .../scalar_count_null.3.query.sqlpp             |  16 +-
 .../scalar_max/scalar_max.1.ddl.sqlpp           |   2 +-
 .../scalar_max/scalar_max.2.update.sqlpp        |   2 +-
 .../scalar_max/scalar_max.3.query.sqlpp         |  18 +-
 .../scalar_max_empty.1.ddl.sqlpp                |   2 +-
 .../scalar_max_empty.2.update.sqlpp             |   2 +-
 .../scalar_max_empty.3.query.sqlpp              |   4 +-
 .../scalar_max_null/scalar_max_null.1.ddl.sqlpp |   2 +-
 .../scalar_max_null.2.update.sqlpp              |   2 +-
 .../scalar_max_null.3.query.sqlpp               |  18 +-
 .../scalar_min/scalar_min.1.ddl.sqlpp           |   2 +-
 .../scalar_min/scalar_min.2.update.sqlpp        |   2 +-
 .../scalar_min/scalar_min.3.query.sqlpp         |  18 +-
 .../scalar_min_empty.1.ddl.sqlpp                |   2 +-
 .../scalar_min_empty.2.update.sqlpp             |   2 +-
 .../scalar_min_empty.3.query.sqlpp              |   4 +-
 .../scalar_min_null/scalar_min_null.1.ddl.sqlpp |   2 +-
 .../scalar_min_null.2.update.sqlpp              |   2 +-
 .../scalar_min_null.3.query.sqlpp               |  18 +-
 .../scalar_sum/scalar_sum.1.ddl.sqlpp           |   2 +-
 .../scalar_sum/scalar_sum.2.update.sqlpp        |   2 +-
 .../scalar_sum/scalar_sum.3.query.sqlpp         |  14 +-
 .../scalar_sum_empty.1.ddl.sqlpp                |   2 +-
 .../scalar_sum_empty.2.update.sqlpp             |   2 +-
 .../scalar_sum_empty.3.query.sqlpp              |   4 +-
 .../scalar_sum_null/scalar_sum_null.1.ddl.sqlpp |   2 +-
 .../scalar_sum_null.2.update.sqlpp              |   2 +-
 .../scalar_sum_null.3.query.sqlpp               |  14 +-
 .../sum_double/sum_double.3.query.sqlpp         |   2 +-
 .../sum_double_null.3.query.sqlpp               |   2 +-
 .../sum_empty_01/sum_empty_01.1.ddl.sqlpp       |   2 +-
 .../sum_empty_01/sum_empty_01.2.update.sqlpp    |   2 +-
 .../sum_empty_01/sum_empty_01.3.query.sqlpp     |   4 +-
 .../sum_empty_02/sum_empty_02.1.ddl.sqlpp       |   2 +-
 .../sum_empty_02/sum_empty_02.2.update.sqlpp    |   2 +-
 .../sum_empty_02/sum_empty_02.3.query.sqlpp     |   4 +-
 .../sum_float/sum_float.3.query.sqlpp           |   2 +-
 .../sum_float_null/sum_float_null.3.query.sqlpp |   2 +-
 .../sum_int16/sum_int16.3.query.sqlpp           |   2 +-
 .../sum_int16_null/sum_int16_null.3.query.sqlpp |   2 +-
 .../sum_int32/sum_int32.3.query.sqlpp           |   2 +-
 .../sum_int32_null/sum_int32_null.3.query.sqlpp |   2 +-
 .../sum_int64/sum_int64.3.query.sqlpp           |   2 +-
 .../sum_int64_null/sum_int64_null.3.query.sqlpp |   2 +-
 .../sum_int8/sum_int8.3.query.sqlpp             |   2 +-
 .../sum_int8_null/sum_int8_null.3.query.sqlpp   |   2 +-
 .../sum_mixed/sum_mixed.1.ddl.sqlpp             |   2 +-
 .../sum_mixed/sum_mixed.3.query.sqlpp           |   4 +-
 .../sum_null-with-pred.1.ddl.sqlpp              |   4 +-
 .../sum_null-with-pred.2.update.sqlpp           |   4 +-
 .../sum_null-with-pred.3.query.sqlpp            |   6 +-
 .../sum_numeric_null.1.ddl.sqlpp                |   2 +-
 .../sum_numeric_null.2.update.sqlpp             |   2 +-
 .../sum_numeric_null.3.query.sqlpp              |   4 +-
 .../aggregate/agg_null/agg_null.3.query.sqlpp   |   2 +-
 .../agg_null_rec/agg_null_rec.3.query.sqlpp     |  10 +-
 .../agg_null_rec_1/agg_null_rec_1.3.query.sqlpp |  10 +-
 .../agg_number/agg_number.3.query.sqlpp         |   2 +-
 .../agg_number_rec/agg_number_rec.3.query.sqlpp |  10 +-
 .../avg_double/avg_double.3.query.sqlpp         |   2 +-
 .../avg_double_null.3.query.sqlpp               |   2 +-
 .../avg_empty_01/avg_empty_01.3.query.sqlpp     |   2 +-
 .../avg_empty_02/avg_empty_02.3.query.sqlpp     |   2 +-
 .../aggregate/avg_float/avg_float.3.query.sqlpp |   2 +-
 .../avg_float_null/avg_float_nu.3.query.sqlpp   |   2 +-
 .../aggregate/avg_int16/avg_int16.3.query.sqlpp |   2 +-
 .../avg_int16_null/avg_int16_null.3.query.sqlpp |   2 +-
 .../aggregate/avg_int32/avg_int32.3.query.sqlpp |   2 +-
 .../avg_int32_null/avg_int32_null.3.query.sqlpp |   2 +-
 .../aggregate/avg_int64/avg_int64.3.query.sqlpp |   2 +-
 .../avg_int64_null/avg_int64_null.3.query.sqlpp |   2 +-
 .../aggregate/avg_int8/avg_int8.3.query.sqlpp   |   2 +-
 .../avg_int8_null/avg_int8_null.3.query.sqlpp   |   2 +-
 .../aggregate/avg_mixed/avg_mixed.3.query.sqlpp |   2 +-
 .../aggregate/count_01/count_01.3.query.sqlpp   |   2 +-
 .../count_empty_01/count_empty_01.3.query.sqlpp |   2 +-
 .../count_empty_02/count_empty_02.3.query.sqlpp |   2 +-
 .../count_null/count_null.3.query.sqlpp         |   2 +-
 .../aggregate/issue395/issue395.3.query.sqlpp   |   2 +-
 .../issue412_0/issue412_0.3.query.sqlpp         |   2 +-
 .../issue412_1/issue412_1.3.query.sqlpp         |   2 +-
 .../issue425_min_hetero_list.3.query.sqlpp      |   2 +-
 .../issue425_min_hetero_list_1.3.query.sqlpp    |   2 +-
 .../issue425_sum_hetero_list.3.query.sqlpp      |   2 +-
 .../issue425_sum_hetero_list_1.3.query.sqlpp    |   2 +-
 .../issue531_string_min_max.3.query.sqlpp       |   4 +-
 .../max_empty_01/max_empty_01.3.query.sqlpp     |   2 +-
 .../max_empty_02/max_empty_02.3.query.sqlpp     |   2 +-
 .../min_empty_01/min_empty_01.3.query.sqlpp     |   2 +-
 .../min_empty_02/min_empty_02.3.query.sqlpp     |   2 +-
 .../aggregate/min_mixed/min_mixed.3.query.sqlpp |   2 +-
 .../query-issue400/query-issue400.3.query.sqlpp |   2 +-
 .../scalar_avg/scalar_avg.3.query.sqlpp         |  12 +-
 .../scalar_avg_empty.3.query.sqlpp              |   2 +-
 .../scalar_avg_null.3.query.sqlpp               |  12 +-
 .../scalar_count/scalar_count.3.query.sqlpp     |  14 +-
 .../scalar_count_empty.3.query.sqlpp            |   2 +-
 .../scalar_count_null.3.query.sqlpp             |  14 +-
 .../scalar_max/scalar_max.3.query.sqlpp         |  16 +-
 .../scalar_max_empty.3.query.sqlpp              |   2 +-
 .../scalar_max_null.3.query.sqlpp               |  16 +-
 .../scalar_min/scalar_min.3.query.sqlpp         |  16 +-
 .../scalar_min_empty.3.query.sqlpp              |   2 +-
 .../scalar_min_null.3.query.sqlpp               |  16 +-
 .../scalar_sum/scalar_sum.3.query.sqlpp         |  12 +-
 .../scalar_sum_empty.3.query.sqlpp              |   2 +-
 .../scalar_sum_null.3.query.sqlpp               |  12 +-
 .../sum_double/sum_double.3.query.sqlpp         |   2 +-
 .../sum_double_null.3.query.sqlpp               |   2 +-
 .../sum_empty_01/sum_empty_01.3.query.sqlpp     |   2 +-
 .../sum_empty_02/sum_empty_02.3.query.sqlpp     |   2 +-
 .../aggregate/sum_float/sum_float.3.query.sqlpp |   2 +-
 .../sum_float_null/sum_float_null.3.query.sqlpp |   2 +-
 .../aggregate/sum_int16/sum_int16.3.query.sqlpp |   2 +-
 .../sum_int16_null/sum_int16_null.3.query.sqlpp |   2 +-
 .../aggregate/sum_int32/sum_int32.3.query.sqlpp |   2 +-
 .../sum_int32_null/sum_int32_null.3.query.sqlpp |   2 +-
 .../aggregate/sum_int64/sum_int64.3.query.sqlpp |   2 +-
 .../sum_int64_null/sum_int64_null.3.query.sqlpp |   2 +-
 .../aggregate/sum_int8/sum_int8.3.query.sqlpp   |   2 +-
 .../sum_int8_null/sum_int8_null.3.query.sqlpp   |   2 +-
 .../aggregate/sum_mixed/sum_mixed.3.query.sqlpp |   2 +-
 .../sum_null-with-pred.1.ddl.sqlpp              |   2 +-
 .../sum_null-with-pred.2.update.sqlpp           |   2 +-
 .../sum_null-with-pred.3.query.sqlpp            |   4 +-
 .../sum_numeric_null.1.ddl.sqlpp                |   2 +-
 .../sum_numeric_null.2.update.sqlpp             |   2 +-
 .../sum_numeric_null.3.query.sqlpp              |   4 +-
 .../cross-dv03/cross-dv03.3.query.sqlpp         |   2 +-
 .../custord/freq-clerk/freq-clerk.3.query.sqlpp |   2 +-
 .../custord/join_q_06/join_q_06.3.query.sqlpp   |   2 +-
 .../custord/join_q_07/join_q_07.3.query.sqlpp   |   2 +-
 .../queries_sqlpp/dapd/q2/q2.3.query.sqlpp      |   4 +-
 .../query-issue382/query-issue382.3.query.sqlpp |   2 +-
 .../rc-format/rc-format.1.ddl.sqlpp             |   8 +-
 ...pricing_summary_report_failure.3.query.sqlpp |  16 +-
 .../feeds/feeds_05/feeds_05.3.query.sqlpp       |   2 +-
 .../feeds/feeds_07/feeds_07.3.query.sqlpp       |   2 +-
 .../feeds/feeds_08/feeds_08.3.query.sqlpp       |   2 +-
 .../feeds/feeds_09/feeds_09.3.query.sqlpp       |   2 +-
 .../feeds/feeds_10/feeds_10.3.query.sqlpp       |   2 +-
 .../flwor/grpby01/grpby01.3.query.sqlpp         |   2 +-
 .../flwor/grpby02/grpby02.3.query.sqlpp         |   2 +-
 .../fuzzyjoin/dblp-1_1/dblp-1_1.3.query.sqlpp   |   2 +-
 .../dblp-2_5.3/dblp-2_5.3.3.query.sqlpp         |   2 +-
 .../group-by/core-01/core-01.3.query.sqlpp      |   2 +-
 .../group-by/core-02/core-02.3.query.sqlpp      |   2 +-
 .../group-by/core-03/core-02.3.query.sqlpp      |   2 +-
 .../group-by/core-05/core-05.3.query.sqlpp      |   2 +-
 .../group-by/sugar-01/sugar-01.3.query.sqlpp    |   2 +-
 .../group-by/sugar-02/sugar-02.3.query.sqlpp    |   2 +-
 .../group-by/sugar-03/sugar-03.3.query.sqlpp    |   2 +-
 .../group-by/sugar-05/sugar-05.3.query.sqlpp    |   2 +-
 .../hdfs/hdfs_02/hdfs_02.3.query.sqlpp          |   2 +-
 .../hdfs/hdfs_03/hdfs_03.3.query.sqlpp          |   2 +-
 .../hdfs_shortcircuit.3.query.sqlpp             |   2 +-
 .../issue_251_dataset_hint_6.3.query.sqlpp      |   2 +-
 .../list/listify_03/listify_03.3.query.sqlpp    |   4 +-
 .../issue289_query/issue289_query.3.query.sqlpp |   2 +-
 .../assign-reuse/assign-reuse.3.query.sqlpp     |   4 +-
 .../groupby-orderby-count.3.query.sqlpp         |   4 +-
 .../query-issue258.2.update.sqlpp               |   2 +-
 .../everysat_03/everysat_03.3.query.sqlpp       |   2 +-
 .../somesat_03/somesat_03.3.query.sqlpp         |   2 +-
 .../somesat_04/somesat_04.3.query.sqlpp         |   2 +-
 .../count-nullable/count-nullable.3.query.sqlpp |   2 +-
 ...ell-aggregation-with-filtering.3.query.sqlpp |   2 +-
 .../cell-aggregation.3.query.sqlpp              |   2 +-
 .../q01_pricing_summary_report_nt.3.query.sqlpp |  16 +-
 .../temporal/agg_max/agg_max.3.query.sqlpp      |  10 +-
 .../temporal/agg_min/agg_min.3.query.sqlpp      |  10 +-
 .../overlap_bins_gby_1.3.query.sqlpp            |   2 +-
 .../overlap_bins_gby_3.3.query.sqlpp            |   4 +-
 .../query-ASTERIXDB-1331.25.query.sqlpp         |   4 +-
 .../tinysocial-suite.14.query.sqlpp             |   2 +-
 .../tinysocial-suite.22.query.sqlpp             |   2 +-
 .../tinysocial-suite.14.query.sqlpp             |   2 +-
 .../tinysocial-suite.22.query.sqlpp             |   2 +-
 .../tinysocial-suite.25.query.sqlpp             |   4 +-
 .../q01_pricing_summary_report_nt.3.query.sqlpp |  14 +-
 .../q02_minimum_cost_supplier.3.query.sqlpp     |   2 +-
 .../q03_shipping_priority_nt.3.query.sqlpp      |   2 +-
 .../q04_order_priority.3.query.sqlpp            |   2 +-
 .../q05_local_supplier_volume.3.query.sqlpp     |   2 +-
 .../q06_forecast_revenue_change.3.query.sqlpp   |  10 +-
 .../q07_volume_shipping.3.query.sqlpp           |   2 +-
 .../q08_national_market_share.3.query.sqlpp     |   4 +-
 .../q09_product_type_profit_nt.3.query.sqlpp    |   2 +-
 .../q10_returned_item.3.query.sqlpp             |   2 +-
 .../q10_returned_item_int64.3.query.sqlpp       |   2 +-
 .../q11_important_stock.3.query.sqlpp           |   4 +-
 .../q12_shipping/q12_shipping.3.query.sqlpp     |   4 +-
 .../q13_customer_distribution.3.query.sqlpp     |   4 +-
 .../q14_promotion_effect.3.query.sqlpp          |   4 +-
 .../q15_top_supplier.3.query.sqlpp              |   4 +-
 ...16_parts_supplier_relationship.3.query.sqlpp |   2 +-
 .../q17_large_gby_variant.3.query.sqlpp         |  20 +-
 ...7_small_quantity_order_revenue.3.query.sqlpp |   4 +-
 .../q18_large_volume_customer.3.query.sqlpp     |   4 +-
 .../q19_discounted_revenue.3.query.sqlpp        |   2 +-
 .../q20_potential_part_promotion.3.query.sqlpp  |   2 +-
 ...pliers_who_kept_orders_waiting.3.query.sqlpp |  10 +-
 .../q22_global_sales_opportunity.3.query.sqlpp  |   4 +-
 .../query-issue601/query-issue601.3.query.sqlpp |   2 +-
 .../query-issue638/query-issue638.3.query.sqlpp |   4 +-
 .../query-issue785-2.3.query.sqlpp              |   2 +-
 .../query-issue785/query-issue785.3.query.sqlpp |   4 +-
 .../query-issue786/query-issue786.3.query.sqlpp |   2 +-
 .../q06_forecast_revenue_change.3.query.sqlpp   |   7 +-
 .../q11_important_stock.3.query.sqlpp           |   8 +-
 .../q13_customer_distribution.3.query.sqlpp     |   2 +-
 .../q15_top_supplier.3.query.sqlpp              |   7 +-
 ...7_small_quantity_order_revenue.3.query.sqlpp |   9 +-
 .../q19_discounted_revenue.3.query.sqlpp        |  13 +-
 .../q22_global_sales_opportunity.3.query.sqlpp  |   6 +-
 .../query-issue562/query-issue562.3.query.sqlpp |   2 +-
 .../query-issue810-2.3.query.sqlpp              |   2 +-
 .../query-issue810-3.3.query.sqlpp              |   4 +-
 .../query-issue810/query-issue810.3.query.sqlpp |   4 +-
 .../nest_aggregate/nest_aggregate.3.query.sqlpp |   2 +-
 .../nest_aggregate2.3.query.sqlpp               |   2 +-
 .../q01_pricing_summary_report_nt.3.query.sqlpp |  14 +-
 .../q02_minimum_cost_supplier.3.query.sqlpp     |   2 +-
 .../q03_shipping_priority_nt.3.query.sqlpp      |   2 +-
 .../q05_local_supplier_volume.3.query.sqlpp     |   2 +-
 .../q06_forecast_revenue_change.3.query.sqlpp   |   2 +-
 .../q07_volume_shipping.3.query.sqlpp           |   2 +-
 .../q08_national_market_share.3.query.sqlpp     |   4 +-
 .../q09_product_type_profit_nt.3.query.sqlpp    |   2 +-
 .../q10_returned_item.3.query.sqlpp             |   2 +-
 .../q10_returned_item_int64.3.query.sqlpp       |   2 +-
 .../q11_important_stock.3.query.sqlpp           |   4 +-
 .../q12_shipping/q12_shipping.3.query.sqlpp     |   4 +-
 .../q13_customer_distribution.3.query.sqlpp     |   4 +-
 .../q14_promotion_effect.3.query.sqlpp          |   4 +-
 .../q15_top_supplier.3.query.sqlpp              |   4 +-
 ...16_parts_supplier_relationship.3.query.sqlpp |   2 +-
 .../q17_large_gby_variant.3.query.sqlpp         |  20 +-
 ...7_small_quantity_order_revenue.3.query.sqlpp |   4 +-
 .../q18_large_volume_customer.3.query.sqlpp     |   4 +-
 .../q19_discounted_revenue.3.query.sqlpp        |   2 +-
 .../q20_potential_part_promotion.3.query.sqlpp  |   2 +-
 ...pliers_who_kept_orders_waiting.3.query.sqlpp |   8 +-
 .../q22_global_sales_opportunity.3.query.sqlpp  |   4 +-
 .../query-issue562/query-issue562.3.query.sqlpp |   6 +-
 .../query-issue785-2.3.query.sqlpp              |   2 +-
 .../query-issue785/query-issue785.3.query.sqlpp |   4 +-
 .../query-issue786/query-issue786.3.query.sqlpp |   2 +-
 .../query-issue810-2.3.query.sqlpp              |   2 +-
 .../query-issue810-3.3.query.sqlpp              |   2 +-
 .../query-issue810/query-issue810.3.query.sqlpp |   2 +-
 .../query-issue827-2.3.query.sqlpp              |  16 +-
 .../query-issue827/query-issue827.3.query.sqlpp |   4 +-
 .../nest_aggregate/nest_aggregate.3.query.sqlpp |   2 +-
 .../nest_aggregate2.3.query.sqlpp               |   2 +-
 .../q01_pricing_summary_report_nt.3.query.sqlpp |  16 +-
 .../q02_minimum_cost_supplier.3.query.sqlpp     |   2 +-
 .../q03_shipping_priority_nt.3.query.sqlpp      |   2 +-
 .../q05_local_supplier_volume.3.query.sqlpp     |   2 +-
 .../q06_forecast_revenue_change.3.query.sqlpp   |   2 +-
 .../q07_volume_shipping.3.query.sqlpp           |   2 +-
 .../q08_national_market_share.3.query.sqlpp     |   4 +-
 .../q09_product_type_profit_nt.3.query.sqlpp    |   2 +-
 .../q10_returned_item.3.query.sqlpp             |   2 +-
 .../q10_returned_item_int64.3.query.sqlpp       |   2 +-
 .../q11_important_stock.3.query.sqlpp           |   4 +-
 .../q12_shipping/q12_shipping.3.query.sqlpp     |   4 +-
 .../q13_customer_distribution.3.query.sqlpp     |   4 +-
 .../q14_promotion_effect.3.query.sqlpp          |   6 +-
 .../q15_top_supplier.3.query.sqlpp              |   4 +-
 ...16_parts_supplier_relationship.3.query.sqlpp |   6 +-
 .../q17_large_gby_variant.3.query.sqlpp         |  20 +-
 ...7_small_quantity_order_revenue.3.query.sqlpp |   4 +-
 .../q18_large_volume_customer.3.query.sqlpp     |   4 +-
 .../q19_discounted_revenue.3.query.sqlpp        |   2 +-
 .../q20_potential_part_promotion.3.query.sqlpp  |   2 +-
 ...pliers_who_kept_orders_waiting.3.query.sqlpp |   8 +-
 .../q22_global_sales_opportunity.3.query.sqlpp  |   6 +-
 .../query-issue562/query-issue562.3.query.sqlpp |   6 +-
 .../query-issue638/query-issue638.3.query.sqlpp |   2 +-
 .../query-issue785-2.3.query.sqlpp              |   2 +-
 .../query-issue785/query-issue785.3.query.sqlpp |   4 +-
 .../query-issue786/query-issue786.3.query.sqlpp |   2 +-
 .../query-issue810-2.3.query.sqlpp              |   2 +-
 .../query-issue810-3.3.query.sqlpp              |   2 +-
 .../query-issue810/query-issue810.3.query.sqlpp |   2 +-
 .../query-issue827-2.3.query.sqlpp              |  16 +-
 .../query-issue827/query-issue827.3.query.sqlpp |   4 +-
 .../query-issue489/query-issue489.2.query.sqlpp |   2 +-
 .../query-issue489/query-issue489.4.query.sqlpp |   2 +-
 .../classad-parser/classad-parser.1.adm         | 100 ----
 .../classad-parser2/classad-parser2.1.adm       |   5 -
 .../feed-with-external-parser.1.adm             | 198 +++----
 .../q06_forecast_revenue_change.3.ast           | 148 ++---
 .../src/test/resources/runtimets/testsuite.xml  | 101 ++--
 .../resources/runtimets/testsuite_sqlpp.xml     |  49 +-
 .../common/context/DatasetLifecycleManager.java |  21 +-
 .../context/PrimaryIndexOperationTracker.java   |  26 +-
 .../asterix/common/utils/StoragePathUtil.java   |   2 +-
 .../apache/asterix/test/aql/TestExecutor.java   | 542 ++++++++++---------
 .../org/apache/asterix/test/aql/TestHelper.java |  34 --
 asterixdb/asterix-external-data/pom.xml         |   4 +-
 .../adapter/factory/GenericAdapterFactory.java  |  29 +-
 .../external/api/AsterixInputStream.java        |   8 +-
 .../asterix/external/api/IRecordReader.java     |   9 +-
 .../dataflow/ChangeFeedDataFlowController.java  |   4 +-
 .../ChangeFeedWithMetaDataFlowController.java   |   4 +-
 .../dataflow/FeedRecordDataFlowController.java  |   5 +-
 .../FeedWithMetaDataFlowController.java         |   3 +-
 .../external/input/HDFSDataSourceFactory.java   |  46 +-
 .../stream/AbstractStreamRecordReader.java      | 116 ----
 .../AbstractStreamRecordReaderFactory.java      |  95 ----
 .../stream/EmptyLineSeparatedRecordReader.java  |   7 +-
 .../EmptyLineSeparatedRecordReaderFactory.java  |  43 --
 .../record/reader/stream/LineRecordReader.java  | 147 ++---
 .../reader/stream/LineRecordReaderFactory.java  |  52 --
 .../reader/stream/QuotedLineRecordReader.java   | 134 ++---
 .../stream/SemiStructuredRecordReader.java      |  16 +-
 .../SemiStructuredRecordReaderFactory.java      |  51 --
 .../twitter/TwitterRecordReaderFactory.java     |  19 +-
 .../input/stream/AsterixInputStreamReader.java  |   3 +-
 .../external/input/stream/HDFSInputStream.java  |   9 +-
 .../input/stream/LocalFSInputStream.java        |  54 +-
 .../factory/LocalFSInputStreamFactory.java      |  42 +-
 .../provider/DataflowControllerProvider.java    |   9 +-
 .../provider/DatasourceFactoryProvider.java     |  74 +--
 .../provider/ParserFactoryProvider.java         |   2 +-
 .../external/util/ExternalDataConstants.java    |  10 +-
 .../external/util/ExternalDataUtils.java        |  20 +-
 .../asterix/external/util/FeedLogManager.java   |  34 +-
 .../apache/asterix/external/util/FeedUtils.java |  32 +-
 .../external/util/FileSystemWatcher.java        | 185 ++++---
 .../external/util/LocalFileSystemUtils.java     |  15 +-
 .../external/classad/test/ClassAdToADMTest.java |  97 +++-
 .../asterix/external/library/ClassAdParser.java |  74 ++-
 .../adapter/TestTypedAdapterFactory.java        |   8 +-
 .../parser/test/RecordWithMetaTest.java         |  15 +-
 .../record-parser/record-parser.1.adm           | 200 +++----
 .../external_index/external_index.2.ddl.aql     |   6 +-
 .../test/resources/transactionts/testsuite.xml  |   6 +-
 asterixdb/asterix-lang-common/pom.xml           |   4 +-
 .../lang/common/clause/GroupbyClause.java       |  12 +
 .../asterix/lang/common/clause/LimitClause.java |   4 +
 .../lang/common/expression/CallExpr.java        |   6 +-
 .../lang/common/expression/OperatorExpr.java    |  39 +-
 .../asterix/lang/common/statement/Query.java    |  12 +
 .../CloneAndSubstituteVariablesVisitor.java     |   8 +-
 .../asterix/lang/sqlpp/clause/SelectBlock.java  |   4 +
 .../rewrites/SqlppFunctionBodyRewriter.java     |   3 +
 .../lang/sqlpp/rewrites/SqlppQueryRewriter.java |  41 +-
 .../lang/sqlpp/util/SqlppRewriteUtil.java       |  19 +-
 .../lang/sqlpp/util/SqlppVariableUtil.java      |  84 +++
 .../sqlpp/visitor/InlineColumnAliasVisitor.java | 447 ---------------
 .../sqlpp/visitor/SqlppAstPrintVisitor.java     |  24 +
 .../visitor/SqlppDeleteRewriteVisitor.java      |   2 +-
 .../sqlpp/visitor/SqlppGroupBySugarVisitor.java | 127 -----
 .../lang/sqlpp/visitor/SqlppGroupByVisitor.java | 170 ------
 .../sqlpp/visitor/SqlppInlineUdfsVisitor.java   | 236 --------
 .../SqlppSubstituteVariablesVisitor.java        |   7 +-
 .../lang/sqlpp/visitor/UsedVariableVisitor.java | 362 -------------
 .../visitor/VariableCheckAndRewriteVisitor.java | 505 -----------------
 .../metadata/declared/AqlMetadataProvider.java  |  21 +-
 .../asterix/metadata/utils/DatasetUtils.java    |  17 +-
 .../asterix/om/util/AsterixRuntimeUtil.java     |   6 +-
 .../storage/LSMIndexFileProperties.java         |   2 +-
 asterixdb/asterix-runtime/pom.xml               |  10 -
 .../PersistentLocalResourceRepository.java      |  33 +-
 567 files changed, 7914 insertions(+), 4599 deletions(-)
----------------------------------------------------------------------



[34/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
index c67eb70,0000000..cc50b75
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@@ -1,211 -1,0 +1,232 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.api.common;
 +
 +import java.io.File;
++import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.EnumSet;
 +import java.util.List;
 +
 +import org.apache.asterix.common.config.AsterixPropertiesAccessor;
 +import org.apache.asterix.common.config.GlobalConfig;
 +import org.apache.asterix.hyracks.bootstrap.CCApplicationEntryPoint;
 +import org.apache.asterix.hyracks.bootstrap.NCApplicationEntryPoint;
 +import org.apache.commons.io.FileUtils;
 +import org.apache.hyracks.api.client.HyracksConnection;
 +import org.apache.hyracks.api.client.IHyracksClientConnection;
 +import org.apache.hyracks.api.job.JobFlag;
 +import org.apache.hyracks.api.job.JobId;
 +import org.apache.hyracks.api.job.JobSpecification;
 +import org.apache.hyracks.control.cc.ClusterControllerService;
 +import org.apache.hyracks.control.common.controllers.CCConfig;
 +import org.apache.hyracks.control.common.controllers.NCConfig;
 +import org.apache.hyracks.control.nc.NodeControllerService;
 +
 +public class AsterixHyracksIntegrationUtil {
 +
 +    private static final String IO_DIR_KEY = "java.io.tmpdir";
 +    public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
 +    public static final int DEFAULT_HYRACKS_CC_CLUSTER_PORT = 1099;
 +
 +    public static ClusterControllerService cc;
 +    public static NodeControllerService[] ncs;
 +    public static IHyracksClientConnection hcc;
 +
 +    private static AsterixPropertiesAccessor propertiesAccessor;
 +
 +    public static void init(boolean deleteOldInstanceData) throws Exception {
 +        propertiesAccessor = new AsterixPropertiesAccessor();
 +        ncs = new NodeControllerService[propertiesAccessor.getNodeNames().size()];
 +        if (deleteOldInstanceData) {
 +            deleteTransactionLogs();
 +            removeTestStorageFiles();
 +        }
 +
 +        CCConfig ccConfig = new CCConfig();
 +        ccConfig.clusterNetIpAddress = "127.0.0.1";
 +        ccConfig.clientNetIpAddress = "127.0.0.1";
 +        ccConfig.clientNetPort = DEFAULT_HYRACKS_CC_CLIENT_PORT;
 +        ccConfig.clusterNetPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
 +        ccConfig.defaultMaxJobAttempts = 0;
 +        ccConfig.resultTTL = 30000;
 +        ccConfig.resultSweepThreshold = 1000;
 +        ccConfig.appCCMainClass = CCApplicationEntryPoint.class.getName();
 +        // ccConfig.useJOL = true;
 +        cc = new ClusterControllerService(ccConfig);
 +        cc.start();
 +
 +        // Starts ncs.
 +        int n = 0;
 +        List<String> nodes = propertiesAccessor.getNodeNames();
 +        for (String ncName : nodes) {
 +            NCConfig ncConfig1 = new NCConfig();
 +            ncConfig1.ccHost = "localhost";
 +            ncConfig1.ccPort = DEFAULT_HYRACKS_CC_CLUSTER_PORT;
 +            ncConfig1.clusterNetIPAddress = "127.0.0.1";
 +            ncConfig1.dataIPAddress = "127.0.0.1";
 +            ncConfig1.resultIPAddress = "127.0.0.1";
 +            ncConfig1.nodeId = ncName;
 +            ncConfig1.resultTTL = 30000;
 +            ncConfig1.resultSweepThreshold = 1000;
 +            ncConfig1.appArgs = Arrays.asList("-virtual-NC");
 +            String tempPath = System.getProperty(IO_DIR_KEY);
 +            if (tempPath.endsWith(File.separator)) {
 +                tempPath = tempPath.substring(0, tempPath.length() - 1);
 +            }
 +            System.err.println("Using the path: " + tempPath);
 +            // get initial partitions from properties
 +            String[] nodeStores = propertiesAccessor.getStores().get(ncName);
 +            if (nodeStores == null) {
 +                throw new Exception("Couldn't find stores for NC: " + ncName);
 +            }
 +            String tempDirPath = System.getProperty(IO_DIR_KEY);
 +            if (!tempDirPath.endsWith(File.separator)) {
 +                tempDirPath += File.separator;
 +            }
 +            for (int p = 0; p < nodeStores.length; p++) {
 +                // create IO devices based on stores
 +                String iodevicePath = tempDirPath + ncConfig1.nodeId + File.separator + nodeStores[p];
 +                File ioDeviceDir = new File(iodevicePath);
 +                ioDeviceDir.mkdirs();
 +                if (p == 0) {
 +                    ncConfig1.ioDevices = iodevicePath;
 +                } else {
 +                    ncConfig1.ioDevices += "," + iodevicePath;
 +                }
 +            }
 +            ncConfig1.appNCMainClass = NCApplicationEntryPoint.class.getName();
 +            NodeControllerService nodeControllerService = new NodeControllerService(ncConfig1);
 +            ncs[n] = nodeControllerService;
 +            Thread ncStartThread = new Thread() {
 +                @Override
 +                public void run() {
 +                    try {
 +                        nodeControllerService.start();
 +                    } catch (Exception e) {
 +                        e.printStackTrace();
 +                    }
 +                }
 +            };
 +            ncStartThread.start();
 +            ++n;
 +        }
 +        hcc = new HyracksConnection(cc.getConfig().clientNetIpAddress, cc.getConfig().clientNetPort);
 +    }
 +
 +    public static String[] getNcNames() {
 +        return propertiesAccessor.getNodeNames().toArray(new String[propertiesAccessor.getNodeNames().size()]);
 +    }
 +
 +    public static IHyracksClientConnection getHyracksClientConnection() {
 +        return hcc;
 +    }
 +
 +    public static void deinit(boolean deleteOldInstanceData) throws Exception {
++        //stop NCs
++        ArrayList<Thread> stopNCThreads = new ArrayList<>();
 +        for (int n = 0; n < ncs.length; ++n) {
-             if (ncs[n] != null)
-                 ncs[n].stop();
++            NodeControllerService nodeControllerService = ncs[n];
++            if (nodeControllerService != null) {
++                Thread ncStopThread = new Thread() {
++                    @Override
++                    public void run() {
++                        try {
++                            nodeControllerService.stop();
++                        } catch (Exception e) {
++                            e.printStackTrace();
++                        }
++                    }
++                };
++                stopNCThreads.add(ncStopThread);
++                ncStopThread.start();
++            }
++        }
 +
++        //make sure all NCs stopped
++        for (Thread stopNcThread : stopNCThreads) {
++            stopNcThread.join();
 +        }
++
 +        if (cc != null) {
 +            cc.stop();
 +        }
 +
 +        if (deleteOldInstanceData) {
 +            deleteTransactionLogs();
 +            removeTestStorageFiles();
 +        }
 +    }
 +
 +    public static void runJob(JobSpecification spec) throws Exception {
 +        GlobalConfig.ASTERIX_LOGGER.info(spec.toJSON().toString());
 +        JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
 +        GlobalConfig.ASTERIX_LOGGER.info(jobId.toString());
 +        hcc.waitForCompletion(jobId);
 +    }
 +
 +    public static void removeTestStorageFiles() {
 +        File dir = new File(System.getProperty(IO_DIR_KEY));
 +        for (String ncName : propertiesAccessor.getNodeNames()) {
 +            File ncDir = new File(dir, ncName);
 +            FileUtils.deleteQuietly(ncDir);
 +        }
 +    }
 +
 +    private static void deleteTransactionLogs() throws Exception {
 +        for (String ncId : propertiesAccessor.getNodeNames()) {
 +            File log = new File(propertiesAccessor.getTransactionLogDirs().get(ncId));
 +            if (log.exists()) {
 +                FileUtils.deleteDirectory(log);
 +            }
 +        }
 +    }
 +
 +    /**
 +     * main method to run a simple 2 node cluster in-process
 +     * suggested VM arguments: <code>-enableassertions -Xmx2048m -Dfile.encoding=UTF-8</code>
 +     *
 +     * @param args
 +     *            unused
 +     */
 +    public static void main(String[] args) {
 +        Runtime.getRuntime().addShutdownHook(new Thread() {
 +            @Override
 +            public void run() {
 +                try {
 +                    deinit(false);
 +                } catch (Exception e) {
 +                    e.printStackTrace();
 +                }
 +            }
 +        });
 +        try {
 +            System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, "asterix-build-configuration.xml");
 +
 +            init(false);
 +            while (true) {
 +                Thread.sleep(10000);
 +            }
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +            System.exit(1);
 +        }
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
index 5cd490a,0000000..d8f1893
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
@@@ -1,273 -1,0 +1,273 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.app.external;
 +
 +import java.util.Collection;
 +import java.util.List;
++import java.util.Set;
++import java.util.TreeSet;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.common.utils.StoragePathUtil;
 +import org.apache.asterix.external.api.IAdapterFactory;
 +import org.apache.asterix.external.feed.api.IFeedJoint;
 +import org.apache.asterix.external.feed.api.IFeedMessage;
 +import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
 +import org.apache.asterix.external.feed.management.FeedConnectionId;
 +import org.apache.asterix.external.feed.management.FeedId;
 +import org.apache.asterix.external.feed.message.EndFeedMessage;
 +import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
 +import org.apache.asterix.external.feed.message.PrepareStallMessage;
 +import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
 +import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
 +import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 +import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
 +import org.apache.asterix.external.operators.FeedMessageOperatorDescriptor;
 +import org.apache.asterix.external.util.FeedConstants;
 +import org.apache.asterix.external.util.FeedUtils;
 +import org.apache.asterix.file.JobSpecificationUtils;
 +import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +import org.apache.asterix.metadata.entities.Feed;
 +import org.apache.asterix.om.util.AsterixClusterProperties;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
 +import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +import org.apache.hyracks.algebricks.common.utils.Triple;
 +import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 +import org.apache.hyracks.api.job.JobSpecification;
 +import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 +import org.apache.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 +import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
 +
 +/**
 + * Provides helper method(s) for creating JobSpec for operations on a feed.
 + */
 +public class FeedOperations {
 +
 +    /**
 +     * Builds the job spec for ingesting a (primary) feed from its external source via the feed adaptor.
++     *
 +     * @param primaryFeed
 +     * @param metadataProvider
 +     * @return JobSpecification the Hyracks job specification for receiving data from external source
 +     * @throws Exception
 +     */
 +    public static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed primaryFeed,
 +            AqlMetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
- 
 +        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
 +        spec.setFrameSize(FeedConstants.JobConstants.DEFAULT_FRAME_SIZE);
 +        IAdapterFactory adapterFactory = null;
 +        IOperatorDescriptor feedIngestor;
 +        AlgebricksPartitionConstraint ingesterPc;
- 
-         try {
-             Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> t = metadataProvider
-                     .buildFeedIntakeRuntime(spec, primaryFeed, policyAccessor);
-             feedIngestor = t.first;
-             ingesterPc = t.second;
-             adapterFactory = t.third;
-         } catch (AlgebricksException e) {
-             e.printStackTrace();
-             throw new AsterixException(e);
-         }
- 
++        Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> t = metadataProvider
++                .buildFeedIntakeRuntime(spec, primaryFeed, policyAccessor);
++        feedIngestor = t.first;
++        ingesterPc = t.second;
++        adapterFactory = t.third;
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedIngestor, ingesterPc);
- 
 +        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, ingesterPc);
 +        spec.connect(new OneToOneConnectorDescriptor(spec), feedIngestor, 0, nullSink, 0);
 +        spec.addRoot(nullSink);
 +        return new Pair<JobSpecification, IAdapterFactory>(spec, adapterFactory);
 +    }
 +
 +    public static JobSpecification buildDiscontinueFeedSourceSpec(AqlMetadataProvider metadataProvider, FeedId feedId)
 +            throws AsterixException, AlgebricksException {
 +
 +        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
 +        IOperatorDescriptor feedMessenger = null;
 +        AlgebricksPartitionConstraint messengerPc = null;
 +
 +        List<String> locations = FeedLifecycleListener.INSTANCE.getIntakeLocations(feedId);
 +        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDiscontinueFeedMessengerRuntime(spec, feedId,
 +                locations);
 +
 +        feedMessenger = p.first;
 +        messengerPc = p.second;
 +
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
 +        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
 +        spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
 +        spec.addRoot(nullSink);
 +
 +        return spec;
 +    }
 +
 +    /**
 +     * Builds the job spec for sending message to an active feed to disconnect it from the
 +     * its source.
 +     */
 +    public static Pair<JobSpecification, Boolean> buildDisconnectFeedJobSpec(AqlMetadataProvider metadataProvider,
 +            FeedConnectionId connectionId) throws AsterixException, AlgebricksException {
 +
 +        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
 +        IOperatorDescriptor feedMessenger;
 +        AlgebricksPartitionConstraint messengerPc;
 +        List<String> locations = null;
 +        FeedRuntimeType sourceRuntimeType;
 +        try {
 +            FeedConnectJobInfo cInfo = FeedLifecycleListener.INSTANCE.getFeedConnectJobInfo(connectionId);
 +            IFeedJoint sourceFeedJoint = cInfo.getSourceFeedJoint();
 +            IFeedJoint computeFeedJoint = cInfo.getComputeFeedJoint();
 +
 +            boolean terminateIntakeJob = false;
 +            boolean completeDisconnect = computeFeedJoint == null || computeFeedJoint.getReceivers().isEmpty();
 +            if (completeDisconnect) {
 +                sourceRuntimeType = FeedRuntimeType.INTAKE;
 +                locations = cInfo.getCollectLocations();
 +                terminateIntakeJob = sourceFeedJoint.getReceivers().size() == 1;
 +            } else {
 +                locations = cInfo.getComputeLocations();
 +                sourceRuntimeType = FeedRuntimeType.COMPUTE;
 +            }
 +
 +            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDisconnectFeedMessengerRuntime(spec,
 +                    connectionId, locations, sourceRuntimeType, completeDisconnect, sourceFeedJoint.getOwnerFeedId());
 +
 +            feedMessenger = p.first;
 +            messengerPc = p.second;
 +
 +            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
 +            NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
 +            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
 +            spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
 +            spec.addRoot(nullSink);
 +            return new Pair<JobSpecification, Boolean>(spec, terminateIntakeJob);
 +
 +        } catch (AlgebricksException e) {
 +            throw new AsterixException(e);
 +        }
 +
 +    }
 +
 +    public static JobSpecification buildPrepareStallMessageJob(PrepareStallMessage stallMessage,
 +            Collection<String> collectLocations) throws AsterixException {
 +        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
 +        try {
 +            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
 +                    messageJobSpec, stallMessage.getConnectionId(), stallMessage, collectLocations);
 +            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
 +        } catch (AlgebricksException ae) {
 +            throw new AsterixException(ae);
 +        }
 +        return messageJobSpec;
 +    }
 +
 +    public static JobSpecification buildNotifyThrottlingEnabledMessageJob(
 +            ThrottlingEnabledFeedMessage throttlingEnabledMesg, Collection<String> locations) throws AsterixException {
 +        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
 +        try {
 +            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
 +                    messageJobSpec, throttlingEnabledMesg.getConnectionId(), throttlingEnabledMesg, locations);
 +            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
 +        } catch (AlgebricksException ae) {
 +            throw new AsterixException(ae);
 +        }
 +        return messageJobSpec;
 +    }
 +
 +    public static JobSpecification buildTerminateFlowMessageJob(TerminateDataFlowMessage terminateMessage,
 +            List<String> collectLocations) throws AsterixException {
 +        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
 +        try {
 +            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
 +                    messageJobSpec, terminateMessage.getConnectionId(), terminateMessage, collectLocations);
 +            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
 +        } catch (AlgebricksException ae) {
 +            throw new AsterixException(ae);
 +        }
 +        return messageJobSpec;
 +    }
 +
 +    public static JobSpecification buildCommitAckResponseJob(FeedTupleCommitResponseMessage commitResponseMessage,
 +            Collection<String> targetLocations) throws AsterixException {
 +        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
 +        try {
 +            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
 +                    messageJobSpec, commitResponseMessage.getConnectionId(), commitResponseMessage, targetLocations);
 +            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
 +        } catch (AlgebricksException ae) {
 +            throw new AsterixException(ae);
 +        }
 +        return messageJobSpec;
 +    }
 +
 +    public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDiscontinueFeedMessengerRuntime(
 +            JobSpecification jobSpec, FeedId feedId, List<String> locations) throws AlgebricksException {
 +        FeedConnectionId feedConnectionId = new FeedConnectionId(feedId, null);
 +        IFeedMessage feedMessage = new EndFeedMessage(feedConnectionId, FeedRuntimeType.INTAKE,
 +                feedConnectionId.getFeedId(), true, EndFeedMessage.EndMessageType.DISCONTINUE_SOURCE);
 +        return buildSendFeedMessageRuntime(jobSpec, feedConnectionId, feedMessage, locations);
 +    }
 +
 +    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildSendFeedMessageRuntime(
 +            JobSpecification jobSpec, FeedConnectionId feedConenctionId, IFeedMessage feedMessage,
 +            Collection<String> locations) throws AlgebricksException {
 +        AlgebricksPartitionConstraint partitionConstraint = new AlgebricksAbsolutePartitionConstraint(
 +                locations.toArray(new String[] {}));
 +        FeedMessageOperatorDescriptor feedMessenger = new FeedMessageOperatorDescriptor(jobSpec, feedConenctionId,
 +                feedMessage);
 +        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, partitionConstraint);
 +    }
 +
 +    private static JobSpecification buildSendFeedMessageJobSpec(IOperatorDescriptor operatorDescriptor,
 +            AlgebricksPartitionConstraint messengerPc, JobSpecification messageJobSpec) {
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, operatorDescriptor,
 +                messengerPc);
 +        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(messageJobSpec);
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, nullSink, messengerPc);
 +        messageJobSpec.connect(new OneToOneConnectorDescriptor(messageJobSpec), operatorDescriptor, 0, nullSink, 0);
 +        messageJobSpec.addRoot(nullSink);
 +        return messageJobSpec;
 +    }
 +
 +    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDisconnectFeedMessengerRuntime(
 +            JobSpecification jobSpec, FeedConnectionId feedConenctionId, List<String> locations,
 +            FeedRuntimeType sourceFeedRuntimeType, boolean completeDisconnection, FeedId sourceFeedId)
-                     throws AlgebricksException {
++            throws AlgebricksException {
 +        IFeedMessage feedMessage = new EndFeedMessage(feedConenctionId, sourceFeedRuntimeType, sourceFeedId,
 +                completeDisconnection, EndFeedMessage.EndMessageType.DISCONNECT_FEED);
 +        return buildSendFeedMessageRuntime(jobSpec, feedConenctionId, feedMessage, locations);
 +    }
 +
 +    public static JobSpecification buildRemoveFeedStorageJob(Feed feed) throws Exception {
 +        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-         AlgebricksAbsolutePartitionConstraint locations = AsterixClusterProperties.INSTANCE.getClusterLocations();
++        AlgebricksAbsolutePartitionConstraint allCluster = AsterixClusterProperties.INSTANCE.getClusterLocations();
++        Set<String> nodes = new TreeSet<>();
++        for (String node : allCluster.getLocations()) {
++            nodes.add(node);
++        }
++        AlgebricksAbsolutePartitionConstraint locations = new AlgebricksAbsolutePartitionConstraint(
++                nodes.toArray(new String[nodes.size()]));
 +        FileSplit[] feedLogFileSplits = FeedUtils.splitsForAdapter(feed.getDataverseName(), feed.getFeedName(),
 +                locations);
 +        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = StoragePathUtil
 +                .splitProviderAndPartitionConstraints(feedLogFileSplits);
-         FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(spec, splitsAndConstraint.first);
++        FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(spec, splitsAndConstraint.first, true);
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, frod, splitsAndConstraint.second);
 +        spec.addRoot(frod);
 +        return spec;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
index 06d2b71,0000000..9052696
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DatasetOperations.java
@@@ -1,265 -1,0 +1,261 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.asterix.file;
 +
 +import java.io.File;
 +import java.rmi.RemoteException;
 +import java.util.Map;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.common.api.ILocalResourceMetadata;
 +import org.apache.asterix.common.config.AsterixStorageProperties;
 +import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 +import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
 +import org.apache.asterix.common.exceptions.ACIDException;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 +import org.apache.asterix.formats.base.IDataFormat;
 +import org.apache.asterix.metadata.MetadataManager;
 +import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +import org.apache.asterix.metadata.entities.Dataset;
 +import org.apache.asterix.metadata.entities.Dataverse;
 +import org.apache.asterix.metadata.utils.DatasetUtils;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.asterix.om.util.AsterixAppContextInfo;
 +import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
 +import org.apache.asterix.transaction.management.resource.LSMBTreeLocalResourceMetadata;
 +import org.apache.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
 +import org.apache.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
 +import org.apache.asterix.translator.CompiledStatements.CompiledDatasetDropStatement;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
 +import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.job.JobSpecification;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 +import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
 +import org.apache.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 +import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 +import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
 +import org.apache.hyracks.storage.common.file.ILocalResourceFactoryProvider;
 +import org.apache.hyracks.storage.common.file.LocalResource;
 +
 +public class DatasetOperations {
 +
 +    private static Logger LOGGER = Logger.getLogger(DatasetOperations.class.getName());
 +
 +    public static JobSpecification createDropDatasetJobSpec(CompiledDatasetDropStatement datasetDropStmt,
 +            AqlMetadataProvider metadataProvider)
-                     throws AlgebricksException, HyracksDataException, RemoteException, ACIDException, AsterixException {
++            throws AlgebricksException, HyracksDataException, RemoteException, ACIDException, AsterixException {
 +
 +        String dataverseName = null;
 +        if (datasetDropStmt.getDataverseName() != null) {
 +            dataverseName = datasetDropStmt.getDataverseName();
 +        } else if (metadataProvider.getDefaultDataverse() != null) {
 +            dataverseName = metadataProvider.getDefaultDataverse().getDataverseName();
 +        }
 +
 +        String datasetName = datasetDropStmt.getDatasetName();
 +        String datasetPath = dataverseName + File.separator + datasetName;
 +
 +        LOGGER.info("DROP DATASETPATH: " + datasetPath);
 +
 +        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
 +        if (dataset == null) {
 +            throw new AlgebricksException("DROP DATASET: No metadata for dataset " + datasetName);
 +        }
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            return JobSpecificationUtils.createJobSpecification();
 +        }
 +        boolean temp = dataset.getDatasetDetails().isTemp();
 +
 +        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(metadataProvider.getMetadataTxnContext(),
 +                dataverseName);
 +        IDataFormat format;
 +        try {
 +            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
 +        } catch (Exception e) {
 +            throw new AsterixException(e);
 +        }
 +
 +        ARecordType itemType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
 +                dataset.getItemTypeName());
 +
 +        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
 +        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
 +                itemType, format.getBinaryComparatorFactoryProvider());
 +        int[] filterFields = DatasetUtils.createFilterFields(dataset);
 +        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
 +        JobSpecification specPrimary = JobSpecificationUtils.createJobSpecification();
 +
 +        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
 +                .splitProviderAndPartitionConstraintsForDataset(dataset.getDataverseName(), datasetName, datasetName,
 +                        temp);
 +        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
 +        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
 +                metadataProvider.getMetadataTxnContext());
 +
 +        IndexDropOperatorDescriptor primaryBtreeDrop = new IndexDropOperatorDescriptor(specPrimary,
 +                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
 +                splitsAndConstraint.first,
 +                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
 +                        compactionInfo.first, compactionInfo.second,
 +                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
 +                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
 +                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
 +                        btreeFields, filterFields, !temp));
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(specPrimary, primaryBtreeDrop,
 +                splitsAndConstraint.second);
 +
 +        specPrimary.addRoot(primaryBtreeDrop);
 +
 +        return specPrimary;
 +    }
 +
 +    public static JobSpecification createDatasetJobSpec(Dataverse dataverse, String datasetName,
 +            AqlMetadataProvider metadata) throws AsterixException, AlgebricksException {
 +        String dataverseName = dataverse.getDataverseName();
 +        IDataFormat format;
 +        try {
 +            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
 +        } catch (Exception e) {
 +            throw new AsterixException(e);
 +        }
 +        Dataset dataset = metadata.findDataset(dataverseName, datasetName);
 +        if (dataset == null) {
 +            throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
 +        }
 +        boolean temp = dataset.getDatasetDetails().isTemp();
 +        ARecordType itemType = (ARecordType) metadata.findType(dataset.getItemTypeDataverseName(),
 +                dataset.getItemTypeName());
 +        // get meta item type
 +        ARecordType metaItemType = null;
 +        if (dataset.hasMetaPart()) {
 +            metaItemType = (ARecordType) metadata.findType(dataset.getMetaItemTypeDataverseName(),
 +                    dataset.getMetaItemTypeName());
 +        }
 +        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
 +        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
 +                itemType, format.getBinaryComparatorFactoryProvider());
 +        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType, metaItemType);
 +        int[] bloomFilterKeyFields = DatasetUtils.createBloomFilterKeyFields(dataset);
 +
 +        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
 +        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
 +                itemType, format.getBinaryComparatorFactoryProvider());
 +        int[] filterFields = DatasetUtils.createFilterFields(dataset);
 +        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
 +
 +        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
 +                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
 +        FileSplit[] fs = splitsAndConstraint.first.getFileSplits();
 +        StringBuilder sb = new StringBuilder();
 +        for (int i = 0; i < fs.length; i++) {
 +            sb.append(stringOf(fs[i]) + " ");
 +        }
 +        LOGGER.info("CREATING File Splits: " + sb.toString());
 +
 +        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
 +                metadata.getMetadataTxnContext());
 +        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
 +        //prepare a LocalResourceMetadata which will be stored in NC's local resource repository
 +        ILocalResourceMetadata localResourceMetadata = new LSMBTreeLocalResourceMetadata(typeTraits,
 +                comparatorFactories, bloomFilterKeyFields, true, dataset.getDatasetId(), compactionInfo.first,
 +                compactionInfo.second, filterTypeTraits, filterCmpFactories, btreeFields, filterFields);
 +        ILocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
 +                localResourceMetadata, LocalResource.LSMBTreeResource);
 +
 +        TreeIndexCreateOperatorDescriptor indexCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
 +                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
 +                splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
 +                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
 +                        compactionInfo.first, compactionInfo.second,
 +                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
 +                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
 +                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
 +                        btreeFields, filterFields, !temp),
 +                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, indexCreateOp,
 +                splitsAndConstraint.second);
 +        spec.addRoot(indexCreateOp);
 +        return spec;
 +    }
 +
 +    private static String stringOf(FileSplit fs) {
 +        return fs.getNodeName() + ":" + fs.getLocalFile().toString();
 +    }
 +
 +    public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
 +            AqlMetadataProvider metadata) throws AsterixException, AlgebricksException {
 +        String dataverseName = dataverse.getDataverseName();
 +        IDataFormat format;
 +        try {
 +            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
 +        } catch (Exception e) {
 +            throw new AsterixException(e);
 +        }
 +        Dataset dataset = metadata.findDataset(dataverseName, datasetName);
 +        if (dataset == null) {
 +            throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
 +        }
 +        boolean temp = dataset.getDatasetDetails().isTemp();
- 
 +        ARecordType itemType = (ARecordType) metadata.findType(dataset.getItemTypeDataverseName(),
 +                dataset.getItemTypeName());
++        ARecordType metaItemType = DatasetUtils.getMetaType(metadata, dataset);
 +        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
 +        IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
 +                itemType, format.getBinaryComparatorFactoryProvider());
-         ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
++        ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType, metaItemType);
 +        int[] blooFilterKeyFields = DatasetUtils.createBloomFilterKeyFields(dataset);
- 
 +        ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
 +        IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
 +                itemType, format.getBinaryComparatorFactoryProvider());
 +        int[] filterFields = DatasetUtils.createFilterFields(dataset);
 +        int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
- 
 +        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
 +                .splitProviderAndPartitionConstraintsForDataset(dataverseName, datasetName, datasetName, temp);
- 
 +        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
- 
 +        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
 +                metadata.getMetadataTxnContext());
 +        LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
 +                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
 +                splitsAndConstraint.first, typeTraits, comparatorFactories, blooFilterKeyFields,
 +                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
 +                        compactionInfo.first, compactionInfo.second,
 +                        new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
 +                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
 +                        storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
 +                        btreeFields, filterFields, !temp),
 +                NoOpOperationCallbackFactory.INSTANCE);
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
 +                splitsAndConstraint.second);
 +
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
 +                splitsAndConstraint.second);
 +        spec.addRoot(compactOp);
 +        return spec;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java
index c77ca10,0000000..d5765f1
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/file/DataverseOperations.java
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.file;
 +
 +import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +import org.apache.asterix.metadata.entities.Dataverse;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +import org.apache.hyracks.api.job.JobSpecification;
 +import org.apache.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor;
 +import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 +
 +public class DataverseOperations {
 +    public static JobSpecification createDropDataverseJobSpec(Dataverse dataverse, AqlMetadataProvider metadata) {
 +        JobSpecification jobSpec = JobSpecificationUtils.createJobSpecification();
 +        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
 +                .splitProviderAndPartitionConstraintsForDataverse(dataverse.getDataverseName());
-         FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first);
++        FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first, false);
 +        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, frod, splitsAndConstraint.second);
 +        jobSpec.addRoot(frod);
 +        return jobSpec;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
index 0a0a917,0000000..13b0189
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
@@@ -1,212 -1,0 +1,215 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.messaging;
 +
 +import java.util.Map;
 +import java.util.concurrent.ConcurrentHashMap;
 +import java.util.concurrent.atomic.AtomicLong;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
 +import org.apache.asterix.common.messaging.AbstractApplicationMessage;
 +import org.apache.asterix.common.messaging.CompleteFailbackRequestMessage;
 +import org.apache.asterix.common.messaging.CompleteFailbackResponseMessage;
 +import org.apache.asterix.common.messaging.PreparePartitionsFailbackRequestMessage;
 +import org.apache.asterix.common.messaging.PreparePartitionsFailbackResponseMessage;
 +import org.apache.asterix.common.messaging.ReplicaEventMessage;
 +import org.apache.asterix.common.messaging.ReportMaxResourceIdMessage;
 +import org.apache.asterix.common.messaging.TakeoverMetadataNodeResponseMessage;
 +import org.apache.asterix.common.messaging.TakeoverPartitionsRequestMessage;
 +import org.apache.asterix.common.messaging.TakeoverPartitionsResponseMessage;
 +import org.apache.asterix.common.messaging.api.IApplicationMessage;
 +import org.apache.asterix.common.messaging.api.IApplicationMessageCallback;
 +import org.apache.asterix.common.messaging.api.INCMessageBroker;
 +import org.apache.asterix.common.replication.IRemoteRecoveryManager;
 +import org.apache.asterix.common.replication.Replica;
 +import org.apache.asterix.common.replication.ReplicaEvent;
 +import org.apache.asterix.event.schema.cluster.Node;
 +import org.apache.asterix.metadata.bootstrap.MetadataIndexImmutableProperties;
 +import org.apache.asterix.transaction.management.resource.PersistentLocalResourceRepository;
 +import org.apache.hyracks.api.messages.IMessage;
 +import org.apache.hyracks.api.util.JavaSerializationUtils;
 +import org.apache.hyracks.control.nc.NodeControllerService;
 +
 +public class NCMessageBroker implements INCMessageBroker {
 +    private final static Logger LOGGER = Logger.getLogger(NCMessageBroker.class.getName());
 +
 +    private final NodeControllerService ncs;
 +    private final AtomicLong messageId = new AtomicLong(0);
 +    private final Map<Long, IApplicationMessageCallback> callbacks;
 +    private final IAsterixAppRuntimeContext appContext;
 +
 +    public NCMessageBroker(NodeControllerService ncs) {
 +        this.ncs = ncs;
 +        appContext = (IAsterixAppRuntimeContext) ncs.getApplicationContext().getApplicationObject();
 +        callbacks = new ConcurrentHashMap<Long, IApplicationMessageCallback>();
 +    }
 +
 +    @Override
 +    public void sendMessage(IApplicationMessage message, IApplicationMessageCallback callback) throws Exception {
 +        if (callback != null) {
 +            long uniqueMessageId = messageId.incrementAndGet();
 +            message.setId(uniqueMessageId);
 +            callbacks.put(uniqueMessageId, callback);
 +        }
 +        try {
 +            ncs.sendApplicationMessageToCC(JavaSerializationUtils.serialize(message), null);
 +        } catch (Exception e) {
 +            if (callback != null) {
 +                //remove the callback in case of failure
 +                callbacks.remove(message.getId());
 +            }
 +            throw e;
 +        }
 +    }
 +
 +    @Override
 +    public void receivedMessage(IMessage message, String nodeId) throws Exception {
 +        try {
 +            AbstractApplicationMessage absMessage = (AbstractApplicationMessage) message;
 +            if (LOGGER.isLoggable(Level.INFO)) {
 +                LOGGER.info("Received message: " + absMessage.getMessageType().name());
 +            }
 +            //if the received message is a response to a sent message, deliver it to the sender
 +            IApplicationMessageCallback callback = callbacks.remove(absMessage.getId());
 +            if (callback != null) {
 +                callback.deliverMessageResponse(absMessage);
 +            }
 +
 +            //handle requests from CC
 +            switch (absMessage.getMessageType()) {
 +                case REPORT_MAX_RESOURCE_ID_REQUEST:
 +                    reportMaxResourceId();
 +                    break;
 +                case TAKEOVER_PARTITIONS_REQUEST:
 +                    handleTakeoverPartitons(message);
 +                    break;
 +                case TAKEOVER_METADATA_NODE_REQUEST:
 +                    handleTakeoverMetadataNode(message);
 +                    break;
 +                case PREPARE_PARTITIONS_FAILBACK_REQUEST:
 +                    handlePreparePartitionsFailback(message);
 +                    break;
 +                case COMPLETE_FAILBACK_REQUEST:
 +                    handleCompleteFailbackRequest(message);
 +                    break;
 +                case REPLICA_EVENT:
 +                    handleReplicaEvent(message);
 +                    break;
 +                default:
 +                    break;
 +            }
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +            throw e;
 +        }
 +    }
 +
 +    private void handleTakeoverPartitons(IMessage message) throws Exception {
 +        TakeoverPartitionsRequestMessage msg = (TakeoverPartitionsRequestMessage) message;
-         try {
-             IRemoteRecoveryManager remoteRecoeryManager = appContext.getRemoteRecoveryManager();
-             remoteRecoeryManager.takeoverPartitons(msg.getPartitions());
-         } finally {
-             //send response after takeover is completed
-             TakeoverPartitionsResponseMessage reponse = new TakeoverPartitionsResponseMessage(msg.getRequestId(),
-                     appContext.getTransactionSubsystem().getId(), msg.getPartitions());
-             sendMessage(reponse, null);
++        //if the NC is shutting down, it should ignore takeover partitions request
++        if (!appContext.isShuttingdown()) {
++            try {
++                IRemoteRecoveryManager remoteRecoeryManager = appContext.getRemoteRecoveryManager();
++                remoteRecoeryManager.takeoverPartitons(msg.getPartitions());
++            } finally {
++                //send response after takeover is completed
++                TakeoverPartitionsResponseMessage reponse = new TakeoverPartitionsResponseMessage(msg.getRequestId(),
++                        appContext.getTransactionSubsystem().getId(), msg.getPartitions());
++                sendMessage(reponse, null);
++            }
 +        }
 +    }
 +
 +    private void handleTakeoverMetadataNode(IMessage message) throws Exception {
 +        try {
 +            appContext.initializeMetadata(false);
 +            appContext.exportMetadataNodeStub();
 +        } finally {
 +            TakeoverMetadataNodeResponseMessage reponse = new TakeoverMetadataNodeResponseMessage(
 +                    appContext.getTransactionSubsystem().getId());
 +            sendMessage(reponse, null);
 +        }
 +    }
 +
 +    @Override
 +    public void reportMaxResourceId() throws Exception {
 +        ReportMaxResourceIdMessage maxResourceIdMsg = new ReportMaxResourceIdMessage();
 +        //resource ids < FIRST_AVAILABLE_USER_DATASET_ID are reserved for metadata indexes.
 +        long maxResourceId = Math.max(appContext.getLocalResourceRepository().getMaxResourceID(),
 +                MetadataIndexImmutableProperties.FIRST_AVAILABLE_USER_DATASET_ID);
 +        maxResourceIdMsg.setMaxResourceId(maxResourceId);
 +        sendMessage(maxResourceIdMsg, null);
 +    }
 +
 +    private void handleReplicaEvent(IMessage message) {
 +        ReplicaEventMessage msg = (ReplicaEventMessage) message;
 +        Node node = new Node();
 +        node.setId(msg.getNodeId());
 +        node.setClusterIp(msg.getNodeIPAddress());
 +        Replica replica = new Replica(node);
 +        ReplicaEvent event = new ReplicaEvent(replica, msg.getEvent());
 +        appContext.getReplicationManager().reportReplicaEvent(event);
 +    }
 +
 +    private void handlePreparePartitionsFailback(IMessage message) throws Exception {
 +        PreparePartitionsFailbackRequestMessage msg = (PreparePartitionsFailbackRequestMessage) message;
 +        /**
 +         * if the metadata partition will be failed back
 +         * we need to flush and close all datasets including metadata datasets
 +         * otherwise we need to close all non-metadata datasets and flush metadata datasets
 +         * so that their memory components will be copied to the failing back node
 +         */
 +        if (msg.isReleaseMetadataNode()) {
 +            appContext.getDatasetLifecycleManager().closeAllDatasets();
 +            //remove the metadata node stub from RMI registry
 +            appContext.unexportMetadataNodeStub();
 +        } else {
 +            //close all non-metadata datasets
 +            appContext.getDatasetLifecycleManager().closeUserDatasets();
 +            //flush the remaining metadata datasets that were not closed
 +            appContext.getDatasetLifecycleManager().flushAllDatasets();
 +        }
 +
 +        //mark the partitions to be closed as inactive
 +        PersistentLocalResourceRepository localResourceRepo = (PersistentLocalResourceRepository) appContext
 +                .getLocalResourceRepository();
 +        for (Integer partitionId : msg.getPartitions()) {
 +            localResourceRepo.addInactivePartition(partitionId);
 +        }
 +
 +        //send response after partitions prepared for failback
 +        PreparePartitionsFailbackResponseMessage reponse = new PreparePartitionsFailbackResponseMessage(msg.getPlanId(),
 +                msg.getRequestId(), msg.getPartitions());
 +        sendMessage(reponse, null);
 +    }
 +
 +    private void handleCompleteFailbackRequest(IMessage message) throws Exception {
 +        CompleteFailbackRequestMessage msg = (CompleteFailbackRequestMessage) message;
 +        try {
 +            IRemoteRecoveryManager remoteRecoeryManager = appContext.getRemoteRecoveryManager();
 +            remoteRecoeryManager.completeFailbackProcess();
 +        } finally {
 +            CompleteFailbackResponseMessage reponse = new CompleteFailbackResponseMessage(msg.getPlanId(),
 +                    msg.getRequestId(), msg.getPartitions());
 +            sendMessage(reponse, null);
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
index 8d020e7,0000000..e372d31
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
@@@ -1,106 -1,0 +1,107 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.test.runtime;
 +
 +import java.io.File;
 +import java.util.ArrayList;
 +import java.util.Collection;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.app.external.TestLibrarian;
 +import org.apache.asterix.common.config.AsterixTransactionProperties;
 +import org.apache.asterix.test.aql.TestExecutor;
 +import org.apache.asterix.testframework.context.TestCaseContext;
 +import org.apache.commons.lang3.StringUtils;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.runner.RunWith;
 +import org.junit.runners.Parameterized;
 +import org.junit.runners.Parameterized.Parameters;
 +
 +/**
 + * Runs the runtime test cases under 'asterix-app/src/test/resources/runtimets'.
 + */
 +@RunWith(Parameterized.class)
 +public class ExecutionTest {
 +
 +    protected static final Logger LOGGER = Logger.getLogger(ExecutionTest.class.getName());
 +
 +    protected static final String PATH_ACTUAL = "rttest" + File.separator;
 +    protected static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources", "runtimets" },
 +            File.separator);
 +
 +    protected static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
- 
 +    protected static AsterixTransactionProperties txnProperties;
-     private final static TestExecutor testExecutor = new TestExecutor();
++    private static final TestExecutor testExecutor = new TestExecutor();
++    private static final boolean cleanupOnStart = true;
++    private static final boolean cleanupOnStop = true;
 +
 +    @BeforeClass
 +    public static void setUp() throws Exception {
 +        try {
 +            File outdir = new File(PATH_ACTUAL);
 +            outdir.mkdirs();
 +            // remove library directory
 +            TestLibrarian.removeLibraryDir();
 +            testExecutor.setLibrarian(new TestLibrarian());
-             ExecutionTestUtil.setUp();
++            ExecutionTestUtil.setUp(cleanupOnStart);
 +        } catch (Throwable th) {
 +            th.printStackTrace();
 +            throw th;
 +        }
 +    }
 +
 +    @AfterClass
 +    public static void tearDown() throws Exception {
 +        // remove library directory
 +        TestLibrarian.removeLibraryDir();
-         ExecutionTestUtil.tearDown();
++        ExecutionTestUtil.tearDown(cleanupOnStop);
 +    }
 +
 +    @Parameters(name = "ExecutionTest {index}: {0}")
 +    public static Collection<Object[]> tests() throws Exception {
 +        Collection<Object[]> testArgs = buildTestsInXml(TestCaseContext.ONLY_TESTSUITE_XML_NAME);
 +        if (testArgs.size() == 0) {
 +            testArgs = buildTestsInXml(TestCaseContext.DEFAULT_TESTSUITE_XML_NAME);
 +        }
 +        return testArgs;
 +    }
 +
 +    protected static Collection<Object[]> buildTestsInXml(String xmlfile) throws Exception {
 +        Collection<Object[]> testArgs = new ArrayList<Object[]>();
 +        TestCaseContext.Builder b = new TestCaseContext.Builder();
 +        for (TestCaseContext ctx : b.build(new File(PATH_BASE), xmlfile)) {
 +            testArgs.add(new Object[] { ctx });
 +        }
 +        return testArgs;
 +
 +    }
 +
 +    protected TestCaseContext tcCtx;
 +
 +    public ExecutionTest(TestCaseContext tcCtx) {
 +        this.tcCtx = tcCtx;
 +    }
 +
 +    @Test
 +    public void test() throws Exception {
 +        testExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false, ExecutionTestUtil.FailedGroup);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
index 5e76ecb,0000000..d919c92
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTestUtil.java
@@@ -1,112 -1,0 +1,112 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.test.runtime;
 +
 +import java.io.File;
 +import java.nio.file.Paths;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
 +import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
 +import org.apache.asterix.common.config.GlobalConfig;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.IdentitiyResolverFactory;
 +import org.apache.asterix.testframework.xml.TestGroup;
 +import org.apache.asterix.testframework.xml.TestSuite;
 +import org.apache.hyracks.control.nc.NodeControllerService;
 +import org.apache.hyracks.storage.common.buffercache.BufferCache;
 +
 +public class ExecutionTestUtil {
 +
 +    protected static final Logger LOGGER = Logger.getLogger(ExecutionTest.class.getName());
 +
 +    protected static final String PATH_ACTUAL = "rttest" + File.separator;
 +
 +    protected static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
 +
 +    protected static TestGroup FailedGroup;
 +
-     public static void setUp() throws Exception {
++    public static void setUp(boolean cleanup) throws Exception {
 +        System.out.println("Starting setup");
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Starting setup");
 +        }
 +        System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
 +
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("initializing pseudo cluster");
 +        }
-         AsterixHyracksIntegrationUtil.init(true);
++        AsterixHyracksIntegrationUtil.init(cleanup);
 +
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("initializing HDFS");
 +        }
 +
 +        HDFSCluster.getInstance().setup();
 +
 +        // Set the node resolver to be the identity resolver that expects node
 +        // names
 +        // to be node controller ids; a valid assumption in test environment.
 +        System.setProperty(ExternalDataConstants.NODE_RESOLVER_FACTORY_PROPERTY,
 +                IdentitiyResolverFactory.class.getName());
 +
 +        FailedGroup = new TestGroup();
 +        FailedGroup.setName("failed");
 +    }
 +
 +    private static void validateBufferCacheState() {
 +        for (NodeControllerService nc : AsterixHyracksIntegrationUtil.ncs) {
 +            IAsterixAppRuntimeContext appCtx = (IAsterixAppRuntimeContext) nc.getApplicationContext()
 +                    .getApplicationObject();
 +            if (!((BufferCache) appCtx.getBufferCache()).isClean()) {
 +                throw new IllegalStateException();
 +            }
 +        }
 +    }
 +
-     public static void tearDown() throws Exception {
++    public static void tearDown(boolean cleanup) throws Exception {
 +        // validateBufferCacheState(); <-- Commented out until bug is fixed -->
-         AsterixHyracksIntegrationUtil.deinit(true);
++        AsterixHyracksIntegrationUtil.deinit(cleanup);
 +        File outdir = new File(PATH_ACTUAL);
 +        File[] files = outdir.listFiles();
 +        if (files == null || files.length == 0) {
 +            outdir.delete();
 +        }
 +        HDFSCluster.getInstance().cleanup();
 +
 +        if (FailedGroup != null && FailedGroup.getTestCase().size() > 0) {
 +            File temp = File.createTempFile("failed", ".xml");
 +            javax.xml.bind.JAXBContext jaxbCtx = null;
 +            jaxbCtx = javax.xml.bind.JAXBContext.newInstance(TestSuite.class.getPackage().getName());
 +            javax.xml.bind.Marshaller marshaller = null;
 +            marshaller = jaxbCtx.createMarshaller();
 +            marshaller.setProperty(javax.xml.bind.Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
 +            TestSuite failedSuite = new TestSuite();
 +            failedSuite.setResultOffsetPath("results");
 +            failedSuite.setQueryOffsetPath("queries");
 +            failedSuite.getTestGroup().add(FailedGroup);
 +            marshaller.marshal(failedSuite, temp);
 +            System.err.println("The failed.xml is written to :" + temp.getAbsolutePath()
 +                    + ". You can copy it to only.xml by the following cmd:" + "\rcp " + temp.getAbsolutePath() + " "
 +                    + Paths.get("./src/test/resources/runtimets/only.xml").toAbsolutePath());
 +        }
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/SqlppExecutionTest.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/SqlppExecutionTest.java
index cbb14c5,0000000..b827a0d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/SqlppExecutionTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/SqlppExecutionTest.java
@@@ -1,100 -1,0 +1,102 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.test.runtime;
 +
 +import java.io.File;
 +import java.util.ArrayList;
 +import java.util.Collection;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
 +import org.apache.asterix.common.config.AsterixTransactionProperties;
 +import org.apache.asterix.test.aql.TestExecutor;
 +import org.apache.asterix.testframework.context.TestCaseContext;
 +import org.apache.asterix.testframework.xml.TestGroup;
 +import org.apache.commons.lang3.StringUtils;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.runner.RunWith;
 +import org.junit.runners.Parameterized;
 +import org.junit.runners.Parameterized.Parameters;
 +
 +/**
 + * Runs the runtime test cases under 'asterix-app/src/test/resources/runtimets'.
 + */
 +@RunWith(Parameterized.class)
 +public class SqlppExecutionTest {
 +
 +    protected static final Logger LOGGER = Logger.getLogger(SqlppExecutionTest.class.getName());
 +
 +    protected static final String PATH_ACTUAL = "rttest" + File.separator;
 +    protected static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources", "runtimets" },
 +            File.separator);
 +
 +    protected static final String TEST_CONFIG_FILE_NAME = "asterix-build-configuration.xml";
 +
 +    protected static AsterixTransactionProperties txnProperties;
-     private final static TestExecutor testExecutor = new TestExecutor();
++    private static final TestExecutor testExecutor = new TestExecutor();
++    private static final boolean cleanupOnStart = true;
++    private static final boolean cleanupOnStop = true;
 +
 +    protected static TestGroup FailedGroup;
 +
 +    @BeforeClass
 +    public static void setUp() throws Exception {
 +        File outdir = new File(PATH_ACTUAL);
 +        outdir.mkdirs();
-         ExecutionTestUtil.setUp();
++        ExecutionTestUtil.setUp(cleanupOnStart);
 +    }
 +
 +    @AfterClass
 +    public static void tearDown() throws Exception {
-         ExecutionTestUtil.tearDown();
++        ExecutionTestUtil.tearDown(cleanupOnStop);
 +        AsterixHyracksIntegrationUtil.removeTestStorageFiles();
 +    }
 +
 +    @Parameters(name = "SqlppExecutionTest {index}: {0}")
 +    public static Collection<Object[]> tests() throws Exception {
 +        Collection<Object[]> testArgs = buildTestsInXml("only_sqlpp.xml");
 +        if (testArgs.size() == 0) {
 +            testArgs = buildTestsInXml("testsuite_sqlpp.xml");
 +        }
 +        return testArgs;
 +    }
 +
 +    protected static Collection<Object[]> buildTestsInXml(String xmlfile) throws Exception {
 +        Collection<Object[]> testArgs = new ArrayList<Object[]>();
 +        TestCaseContext.Builder b = new TestCaseContext.Builder();
 +        for (TestCaseContext ctx : b.build(new File(PATH_BASE), xmlfile)) {
 +            testArgs.add(new Object[] { ctx });
 +        }
 +        return testArgs;
 +
 +    }
 +
 +    protected TestCaseContext tcCtx;
 +
 +    public SqlppExecutionTest(TestCaseContext tcCtx) {
 +        this.tcCtx = tcCtx;
 +    }
 +
 +    @Test
 +    public void test() throws Exception {
 +        testExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false, FailedGroup);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
index d6cf231,0000000..7f7fbb4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/ParserTestExecutor.java
@@@ -1,181 -1,0 +1,182 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.test.sqlpp;
 +
 +import static org.mockito.Mockito.mock;
 +import static org.mockito.Mockito.when;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.PrintWriter;
 +import java.util.ArrayList;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.config.GlobalConfig;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.IParser;
 +import org.apache.asterix.lang.common.base.IParserFactory;
 +import org.apache.asterix.lang.common.base.IQueryRewriter;
 +import org.apache.asterix.lang.common.base.IRewriterFactory;
 +import org.apache.asterix.lang.common.base.Statement;
 +import org.apache.asterix.lang.common.base.Statement.Kind;
 +import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
 +import org.apache.asterix.lang.common.statement.DataverseDecl;
 +import org.apache.asterix.lang.common.statement.FunctionDecl;
 +import org.apache.asterix.lang.common.statement.Query;
 +import org.apache.asterix.lang.common.util.FunctionUtil;
 +import org.apache.asterix.lang.sqlpp.parser.SqlppParserFactory;
 +import org.apache.asterix.lang.sqlpp.rewrites.SqlppRewriterFactory;
 +import org.apache.asterix.lang.sqlpp.util.SqlppAstPrintUtil;
 +import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +import org.apache.asterix.test.aql.TestExecutor;
 +import org.apache.asterix.testframework.context.TestCaseContext;
 +import org.apache.asterix.testframework.context.TestFileContext;
 +import org.apache.asterix.testframework.xml.TestCase.CompilationUnit;
 +import org.apache.asterix.testframework.xml.TestGroup;
 +
 +import junit.extensions.PA;
 +
 +public class ParserTestExecutor extends TestExecutor {
 +
 +    private IParserFactory sqlppParserFactory = new SqlppParserFactory();
 +    private IRewriterFactory sqlppRewriterFactory = new SqlppRewriterFactory();
 +
 +    @Override
 +    public void executeTest(String actualPath, TestCaseContext testCaseCtx, ProcessBuilder pb,
 +            boolean isDmlRecoveryTest, TestGroup failedGroup) throws Exception {
 +        int queryCount = 0;
 +        List<CompilationUnit> cUnits = testCaseCtx.getTestCase().getCompilationUnit();
 +        for (CompilationUnit cUnit : cUnits) {
 +            LOGGER.info(
 +                    "Starting [TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName() + " ... ");
 +            List<TestFileContext> testFileCtxs = testCaseCtx.getTestFiles(cUnit);
 +            List<TestFileContext> expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
 +            for (TestFileContext ctx : testFileCtxs) {
 +                File testFile = ctx.getFile();
 +                try {
 +                    if (queryCount >= expectedResultFileCtxs.size()) {
 +                        throw new IllegalStateException("no result file for " + testFile.toString() + "; queryCount: "
 +                                + queryCount + ", filectxs.size: " + expectedResultFileCtxs.size());
 +                    }
 +
 +                    // Runs the test query.
 +                    File actualResultFile = testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
 +                    File expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
 +                    testSQLPPParser(testFile, actualResultFile, expectedResultFile);
 +
 +                    LOGGER.info(
 +                            "[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName() + " PASSED ");
 +                    queryCount++;
 +                } catch (Exception e) {
 +                    System.err.println("testFile " + testFile.toString() + " raised an exception:");
 +                    e.printStackTrace();
 +                    if (cUnit.getExpectedError().isEmpty()) {
 +                        System.err.println("...Unexpected!");
 +                        if (failedGroup != null) {
 +                            failedGroup.getTestCase().add(testCaseCtx.getTestCase());
 +                        }
 +                        throw new Exception("Test \"" + testFile + "\" FAILED!", e);
 +                    } else {
 +                        LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
 +                                + " failed as expected: " + e.getMessage());
 +                        System.err.println("...but that was expected.");
 +                    }
 +                }
 +            }
 +        }
 +
 +    }
 +
 +    // Tests the SQL++ parser.
 +    public void testSQLPPParser(File queryFile, File actualResultFile, File expectedFile) throws Exception {
 +        actualResultFile.getParentFile().mkdirs();
 +        PrintWriter writer = new PrintWriter(new FileOutputStream(actualResultFile));
 +        IParser parser = sqlppParserFactory.createParser(readTestFile(queryFile));
 +        GlobalConfig.ASTERIX_LOGGER.info(queryFile.toString());
 +        try {
 +            List<Statement> statements = parser.parse();
 +            List<FunctionDecl> functions = getDeclaredFunctions(statements);
 +            String dvName = getDefaultDataverse(statements);
 +            AqlMetadataProvider aqlMetadataProvider = mock(AqlMetadataProvider.class);
 +
 +            @SuppressWarnings("unchecked")
 +            Map<String, String> config = mock(Map.class);
 +            when(aqlMetadataProvider.getDefaultDataverseName()).thenReturn(dvName);
 +            when(aqlMetadataProvider.getConfig()).thenReturn(config);
 +            when(config.get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS)).thenReturn("true");
 +
 +            for (Statement st : statements) {
 +                if (st.getKind() == Kind.QUERY) {
 +                    Query query = (Query) st;
 +                    IQueryRewriter rewriter = sqlppRewriterFactory.createQueryRewriter();
 +                    rewrite(rewriter, functions, query, aqlMetadataProvider,
 +                            new LangRewritingContext(query.getVarCounter()));
 +                }
 +                SqlppAstPrintUtil.print(st, writer);
 +            }
 +            writer.close();
 +            // Compares the actual result and the expected result.
 +            runScriptAndCompareWithResult(queryFile, new PrintWriter(System.err), expectedFile, actualResultFile);
 +        } catch (Exception e) {
 +            GlobalConfig.ASTERIX_LOGGER.warning("Failed while testing file " + queryFile);
 +            throw e;
 +        } finally {
 +            writer.close();
 +        }
 +    }
 +
 +    // Extracts declared functions.
 +    private List<FunctionDecl> getDeclaredFunctions(List<Statement> statements) {
 +        List<FunctionDecl> functionDecls = new ArrayList<FunctionDecl>();
 +        for (Statement st : statements) {
 +            if (st.getKind().equals(Statement.Kind.FUNCTION_DECL)) {
 +                functionDecls.add((FunctionDecl) st);
 +            }
 +        }
 +        return functionDecls;
 +    }
 +
 +    // Gets the default dataverse for the input statements.
 +    private String getDefaultDataverse(List<Statement> statements) {
 +        for (Statement st : statements) {
 +            if (st.getKind().equals(Statement.Kind.DATAVERSE_DECL)) {
 +                DataverseDecl dv = (DataverseDecl) st;
 +                return dv.getDataverseName().getValue();
 +            }
 +        }
 +        return null;
 +    }
 +
 +    // Rewrite queries.
 +    // Note: we do not do inline function rewriting here because this needs real
 +    // metadata access.
 +    private void rewrite(IQueryRewriter rewriter, List<FunctionDecl> declaredFunctions, Query topExpr,
 +            AqlMetadataProvider metadataProvider, LangRewritingContext context) throws AsterixException {
 +        PA.invokeMethod(rewriter,
 +                "setup(java.util.List, org.apache.asterix.lang.common.statement.Query, org.apache.asterix.metadata.declared.AqlMetadataProvider, "
 +                        + "org.apache.asterix.lang.common.rewrites.LangRewritingContext)",
 +                declaredFunctions, topExpr, metadataProvider, context);
 +        PA.invokeMethod(rewriter, "inlineColumnAlias()");
++        PA.invokeMethod(rewriter, "rewriteGlobalAggregations()");
 +        PA.invokeMethod(rewriter, "rewriteGroupBys()");
 +        PA.invokeMethod(rewriter, "variableCheckAndRewrite(boolean)", Boolean.TRUE);
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/count-tweets.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/count-tweets.sqlpp
index a2ddf4b,0000000..0bea252
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/count-tweets.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/count-tweets.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database twitter if exists;
 +create  database twitter;
 +
 +use twitter;
 +
 +
 +create type twitter.Tweet as
 +{
 +  id : int32,
 +  tweetid : int64,
 +  loc : point,
 +  time : datetime,
 +  text : string
 +}
 +
 +create external  table TwitterData(Tweet) using localfs(("path"="asterix_nc1://data/twitter/smalltweets.txt"),("format"="adm"));
 +
 +write output to asterix_nc1:"/tmp/count-tweets.adm"
- select element {'word':tok,'count':twitter.count(token)}
++select element {'word':tok,'count':count(token)}
 +from  TwitterData as t,
 +      tokens as token
 +with  tokens as twitter."word-tokens"(t.text)
 +group by token as tok
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/denorm-cust-order.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/denorm-cust-order.sqlpp
index b5fc4ea,0000000..d8e2b7a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/denorm-cust-order.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/denorm-cust-order.sqlpp
@@@ -1,70 -1,0 +1,70 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database custorder if exists;
 +create  database custorder;
 +
 +use custorder;
 +
 +
 +create type custorder.AddressType as
 + closed {
 +  number : int32,
 +  street : string,
 +  city : string
 +}
 +
 +create type custorder.CustomerType as
 + closed {
 +  cid : int32,
 +  name : string,
 +  age : int32?,
 +  address : AddressType?,
 +  lastorder : {
 +      oid : int32,
 +      total : float
 +  }
 +
 +}
 +
 +create type custorder.OrderType as
 + closed {
 +  oid : int32,
 +  cid : int32,
 +  orderstatus : string,
 +  orderpriority : string,
 +  clerk : string,
 +  total : float
 +}
 +
- create  nodegroup group1 if not exists  on 
++create  nodegroup group1 if not exists  on
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table Customers(CustomerType) primary key cid on group1;
 +
 +create  table Orders(OrderType) primary key oid on group1;
 +
 +write output to asterix_nc1:"/tmp/custorder.adm"
- select element {'cid':cid,'cust':cust,'cnt-orders':custorder.count(o),'orders':o}
++select element {'cid':cid,'cust':cust,'cnt-orders':count(o),'orders':o}
 +from  Customers as c,
 +      Orders as o
 +where (c.cid = o.cid)
 +group by c.cid as cid
 +;



[07/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
index 581d01c,0000000..0ac4f56
mode 100644,000000..100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtils.java
@@@ -1,277 -1,0 +1,282 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.asterix.metadata.utils;
 +
 +import java.io.DataOutput;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.asterix.builders.IARecordBuilder;
 +import org.apache.asterix.builders.RecordBuilder;
 +import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 +import org.apache.asterix.common.config.MetadataConstants;
 +import org.apache.asterix.common.context.CorrelatedPrefixMergePolicyFactory;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.indexing.IndexingConstants;
 +import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 +import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 +import org.apache.asterix.metadata.MetadataException;
 +import org.apache.asterix.metadata.MetadataManager;
 +import org.apache.asterix.metadata.MetadataTransactionContext;
++import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +import org.apache.asterix.metadata.entities.CompactionPolicy;
 +import org.apache.asterix.metadata.entities.Dataset;
 +import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
 +import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 +import org.apache.asterix.om.base.AMutableString;
 +import org.apache.asterix.om.base.AString;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.asterix.om.types.BuiltinType;
 +import org.apache.asterix.om.types.IAType;
 +import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +import org.apache.hyracks.algebricks.data.IBinaryComparatorFactoryProvider;
 +import org.apache.hyracks.algebricks.data.IBinaryHashFunctionFactoryProvider;
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
 +import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 +import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 +
 +public class DatasetUtils {
 +    public static IBinaryComparatorFactory[] computeKeysBinaryComparatorFactories(Dataset dataset, ARecordType itemType,
 +            IBinaryComparatorFactoryProvider comparatorFactoryProvider) throws AlgebricksException {
 +        List<List<String>> partitioningKeys = getPartitioningKeys(dataset);
 +        IBinaryComparatorFactory[] bcfs = new IBinaryComparatorFactory[partitioningKeys.size()];
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            // Get comparators for RID fields.
 +            for (int i = 0; i < partitioningKeys.size(); i++) {
 +                try {
 +                    bcfs[i] = IndexingConstants.getComparatorFactory(i);
 +                } catch (AsterixException e) {
 +                    throw new AlgebricksException(e);
 +                }
 +            }
 +        } else {
 +            for (int i = 0; i < partitioningKeys.size(); i++) {
 +                IAType keyType = itemType.getSubFieldType(partitioningKeys.get(i));
 +                bcfs[i] = comparatorFactoryProvider.getBinaryComparatorFactory(keyType, true);
 +            }
 +        }
 +        return bcfs;
 +    }
 +
 +    public static int[] createBloomFilterKeyFields(Dataset dataset) throws AlgebricksException {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            throw new AlgebricksException("not implemented");
 +        }
 +        List<List<String>> partitioningKeys = getPartitioningKeys(dataset);
 +        int[] bloomFilterKeyFields = new int[partitioningKeys.size()];
 +        for (int i = 0; i < partitioningKeys.size(); ++i) {
 +            bloomFilterKeyFields[i] = i;
 +        }
 +        return bloomFilterKeyFields;
 +    }
 +
 +    public static IBinaryHashFunctionFactory[] computeKeysBinaryHashFunFactories(Dataset dataset, ARecordType itemType,
 +            IBinaryHashFunctionFactoryProvider hashFunProvider) throws AlgebricksException {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            throw new AlgebricksException("not implemented");
 +        }
 +        List<List<String>> partitioningKeys = getPartitioningKeys(dataset);
 +        IBinaryHashFunctionFactory[] bhffs = new IBinaryHashFunctionFactory[partitioningKeys.size()];
 +        for (int i = 0; i < partitioningKeys.size(); i++) {
 +            IAType keyType = itemType.getSubFieldType(partitioningKeys.get(i));
 +            bhffs[i] = hashFunProvider.getBinaryHashFunctionFactory(keyType);
 +        }
 +        return bhffs;
 +    }
 +
-     public static ITypeTraits[] computeTupleTypeTraits(Dataset dataset, ARecordType itemType)
-             throws AlgebricksException {
-         return computeTupleTypeTraits(dataset, itemType, null);
-     }
- 
 +    public static ITypeTraits[] computeTupleTypeTraits(Dataset dataset, ARecordType itemType, ARecordType metaItemType)
 +            throws AlgebricksException {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            throw new AlgebricksException("not implemented");
 +        }
 +        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
 +        int numKeys = partitioningKeys.size();
 +        ITypeTraits[] typeTraits;
 +        if (metaItemType != null) {
 +            typeTraits = new ITypeTraits[numKeys + 2];
 +            List<Integer> indicator = ((InternalDatasetDetails) dataset.getDatasetDetails()).getKeySourceIndicator();
 +            typeTraits[numKeys + 1] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(metaItemType);
 +            for (int i = 0; i < numKeys; i++) {
 +                IAType keyType;
 +                if (indicator.get(i) == 0) {
 +                    keyType = itemType.getSubFieldType(partitioningKeys.get(i));
 +                } else {
 +                    keyType = metaItemType.getSubFieldType(partitioningKeys.get(i));
 +                }
 +                typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
 +            }
 +        } else {
 +            typeTraits = new ITypeTraits[numKeys + 1];
 +            for (int i = 0; i < numKeys; i++) {
 +                IAType keyType;
 +                keyType = itemType.getSubFieldType(partitioningKeys.get(i));
 +                typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
 +            }
 +        }
 +        typeTraits[numKeys] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(itemType);
 +        return typeTraits;
 +    }
 +
 +    public static List<List<String>> getPartitioningKeys(Dataset dataset) {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            return IndexingConstants.getRIDKeys(((ExternalDatasetDetails) dataset.getDatasetDetails()).getProperties());
 +        }
 +        return ((InternalDatasetDetails) dataset.getDatasetDetails()).getPartitioningKey();
 +    }
 +
 +    public static List<String> getFilterField(Dataset dataset) {
 +        return (((InternalDatasetDetails) dataset.getDatasetDetails())).getFilterField();
 +    }
 +
 +    public static IBinaryComparatorFactory[] computeFilterBinaryComparatorFactories(Dataset dataset,
 +            ARecordType itemType, IBinaryComparatorFactoryProvider comparatorFactoryProvider)
-                     throws AlgebricksException {
++            throws AlgebricksException {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            return null;
 +        }
 +        List<String> filterField = getFilterField(dataset);
 +        if (filterField == null) {
 +            return null;
 +        }
 +        IBinaryComparatorFactory[] bcfs = new IBinaryComparatorFactory[1];
 +        IAType type = itemType.getSubFieldType(filterField);
 +        bcfs[0] = comparatorFactoryProvider.getBinaryComparatorFactory(type, true);
 +        return bcfs;
 +    }
 +
 +    public static ITypeTraits[] computeFilterTypeTraits(Dataset dataset, ARecordType itemType)
 +            throws AlgebricksException {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            return null;
 +        }
 +        List<String> filterField = getFilterField(dataset);
 +        if (filterField == null) {
 +            return null;
 +        }
 +        ITypeTraits[] typeTraits = new ITypeTraits[1];
 +        IAType type = itemType.getSubFieldType(filterField);
 +        typeTraits[0] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(type);
 +        return typeTraits;
 +    }
 +
 +    public static int[] createFilterFields(Dataset dataset) throws AlgebricksException {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            return null;
 +        }
 +
 +        List<String> filterField = getFilterField(dataset);
 +        if (filterField == null) {
 +            return null;
 +        }
 +        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
 +        int numKeys = partitioningKeys.size();
 +
 +        int[] filterFields = new int[1];
 +        filterFields[0] = numKeys + 1;
 +        return filterFields;
 +    }
 +
 +    public static int[] createBTreeFieldsWhenThereisAFilter(Dataset dataset) throws AlgebricksException {
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            return null;
 +        }
 +
 +        List<String> filterField = getFilterField(dataset);
 +        if (filterField == null) {
 +            return null;
 +        }
 +
 +        List<List<String>> partitioningKeys = getPartitioningKeys(dataset);
 +        int valueFields = dataset.hasMetaPart() ? 2 : 1;
 +        int[] btreeFields = new int[partitioningKeys.size() + valueFields];
 +        for (int i = 0; i < btreeFields.length; ++i) {
 +            btreeFields[i] = i;
 +        }
 +        return btreeFields;
 +    }
 +
 +    public static int getPositionOfPartitioningKeyField(Dataset dataset, String fieldExpr) {
 +        List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
 +        for (int i = 0; i < partitioningKeys.size(); i++) {
 +            if ((partitioningKeys.get(i).size() == 1) && partitioningKeys.get(i).get(0).equals(fieldExpr)) {
 +                return i;
 +            }
 +        }
 +        return -1;
 +    }
 +
 +    public static Pair<ILSMMergePolicyFactory, Map<String, String>> getMergePolicyFactory(Dataset dataset,
 +            MetadataTransactionContext mdTxnCtx) throws AlgebricksException, MetadataException {
 +        String policyName = dataset.getCompactionPolicy();
 +        CompactionPolicy compactionPolicy = MetadataManager.INSTANCE.getCompactionPolicy(mdTxnCtx,
 +                MetadataConstants.METADATA_DATAVERSE_NAME, policyName);
 +        String compactionPolicyFactoryClassName = compactionPolicy.getClassName();
 +        ILSMMergePolicyFactory mergePolicyFactory;
 +        try {
 +            mergePolicyFactory = (ILSMMergePolicyFactory) Class.forName(compactionPolicyFactoryClassName).newInstance();
 +            if (mergePolicyFactory.getName().compareTo("correlated-prefix") == 0) {
 +                ((CorrelatedPrefixMergePolicyFactory) mergePolicyFactory).setDatasetID(dataset.getDatasetId());
 +            }
 +        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
 +            throw new AlgebricksException(e);
 +        }
 +        Map<String, String> properties = dataset.getCompactionPolicyProperties();
 +        return new Pair<ILSMMergePolicyFactory, Map<String, String>>(mergePolicyFactory, properties);
 +    }
 +
 +    @SuppressWarnings("unchecked")
 +    public static void writePropertyTypeRecord(String name, String value, DataOutput out, ARecordType recordType)
 +            throws HyracksDataException {
 +        IARecordBuilder propertyRecordBuilder = new RecordBuilder();
 +        ArrayBackedValueStorage fieldValue = new ArrayBackedValueStorage();
 +        propertyRecordBuilder.reset(recordType);
 +        AMutableString aString = new AMutableString("");
 +        ISerializerDeserializer<AString> stringSerde = AqlSerializerDeserializerProvider.INSTANCE
 +                .getSerializerDeserializer(BuiltinType.ASTRING);
 +
 +        // write field 0
 +        fieldValue.reset();
 +        aString.setValue(name);
 +        stringSerde.serialize(aString, fieldValue.getDataOutput());
 +        propertyRecordBuilder.addField(0, fieldValue);
 +
 +        // write field 1
 +        fieldValue.reset();
 +        aString.setValue(value);
 +        stringSerde.serialize(aString, fieldValue.getDataOutput());
 +        propertyRecordBuilder.addField(1, fieldValue);
 +
 +        propertyRecordBuilder.write(out, true);
 +    }
++
++    public static ARecordType getMetaType(AqlMetadataProvider metadataProvider, Dataset dataset)
++            throws AlgebricksException {
++        if (dataset.hasMetaPart()) {
++            return (ARecordType) metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
++                    dataset.getMetaItemTypeName());
++        }
++        return null;
++    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixRuntimeUtil.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixRuntimeUtil.java
index 51c3802,0000000..0e9aa0c
mode 100644,000000..100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixRuntimeUtil.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/util/AsterixRuntimeUtil.java
@@@ -1,59 -1,0 +1,63 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.om.util;
 +
 +import java.net.InetAddress;
 +import java.util.ArrayList;
 +import java.util.Collection;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +
++import org.apache.hyracks.control.cc.ClusterControllerService;
++
 +/**
 + * Utility class for obtaining information on the set of Hyracks NodeController
 + * processes that are running on a given host.
 + */
 +public class AsterixRuntimeUtil {
 +
 +    public static Set<String> getNodeControllersOnIP(InetAddress ipAddress) throws Exception {
 +        Map<InetAddress, Set<String>> nodeControllerInfo = getNodeControllerMap();
 +        Set<String> nodeControllersAtLocation = nodeControllerInfo.get(ipAddress);
 +        return nodeControllersAtLocation;
 +    }
 +
 +    public static List<String> getAllNodeControllers() throws Exception {
 +        Collection<Set<String>> nodeControllersCollection = getNodeControllerMap().values();
 +        List<String> nodeControllers = new ArrayList<String>();
 +        for (Set<String> ncCollection : nodeControllersCollection) {
 +            nodeControllers.addAll(ncCollection);
 +        }
 +        return nodeControllers;
 +    }
 +
 +    public static Map<InetAddress, Set<String>> getNodeControllerMap() throws Exception {
 +        Map<InetAddress, Set<String>> map = new HashMap<InetAddress, Set<String>>();
 +        AsterixAppContextInfo.getInstance().getCCApplicationContext().getCCContext().getIPAddressNodeMap(map);
 +        return map;
 +    }
 +
 +    public static void getNodeControllerMap(Map<InetAddress, Set<String>> map) throws Exception {
-         AsterixAppContextInfo.getInstance().getCCApplicationContext().getCCContext().getIPAddressNodeMap(map);
++        ClusterControllerService ccs = (ClusterControllerService) AsterixAppContextInfo.getInstance()
++                .getCCApplicationContext().getControllerService();
++        map.putAll(ccs.getIpAddressNodeNameMap());
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/storage/LSMIndexFileProperties.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/storage/LSMIndexFileProperties.java
index 2bf5fa3,0000000..a349e51
mode 100644,000000..100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/storage/LSMIndexFileProperties.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/storage/LSMIndexFileProperties.java
@@@ -1,154 -1,0 +1,154 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.replication.storage;
 +
 +import java.io.DataInput;
 +import java.io.DataOutputStream;
 +import java.io.File;
 +import java.io.IOException;
 +import java.io.OutputStream;
 +
 +import org.apache.asterix.common.utils.StoragePathUtil;
 +import org.apache.hyracks.storage.am.common.api.IMetaDataPageManager;
 +
 +public class LSMIndexFileProperties {
 +
 +    private String fileName;
 +    private long fileSize;
 +    private String nodeId;
 +    private String dataverse;
 +    private String idxName;
 +    private boolean lsmComponentFile;
 +    private String filePath;
 +    private boolean requiresAck = false;
 +    private long LSNByteOffset;
 +    private int partition;
 +
 +    public LSMIndexFileProperties() {
 +    }
 +
 +    public LSMIndexFileProperties(String filePath, long fileSize, String nodeId, boolean lsmComponentFile,
 +            long LSNByteOffset, boolean requiresAck) {
 +        initialize(filePath, fileSize, nodeId, lsmComponentFile, LSNByteOffset, requiresAck);
 +    }
 +
 +    public LSMIndexFileProperties(LSMComponentProperties lsmComponentProperties) {
 +        initialize(lsmComponentProperties.getComponentId(), -1, lsmComponentProperties.getNodeId(), false,
 +                IMetaDataPageManager.INVALID_LSN_OFFSET, false);
 +    }
 +
 +    public void initialize(String filePath, long fileSize, String nodeId, boolean lsmComponentFile, long LSNByteOffset,
 +            boolean requiresAck) {
 +        this.filePath = filePath;
 +        this.fileSize = fileSize;
 +        this.nodeId = nodeId;
 +        this.lsmComponentFile = lsmComponentFile;
 +        this.LSNByteOffset = LSNByteOffset;
 +        this.requiresAck = requiresAck;
 +    }
 +
 +    public void splitFileName() {
 +        String[] tokens = filePath.split(File.separator);
 +        int arraySize = tokens.length;
 +        this.fileName = tokens[arraySize - 1];
 +        this.idxName = tokens[arraySize - 2];
 +        this.dataverse = tokens[arraySize - 3];
-         this.partition = StoragePathUtil.getPartitonNumFromName(tokens[arraySize - 4]);
++        this.partition = StoragePathUtil.getPartitionNumFromName(tokens[arraySize - 4]);
 +    }
 +
 +    public void serialize(OutputStream out) throws IOException {
 +        DataOutputStream dos = new DataOutputStream(out);
 +        dos.writeUTF(nodeId);
 +        dos.writeUTF(filePath);
 +        dos.writeLong(fileSize);
 +        dos.writeBoolean(lsmComponentFile);
 +        dos.writeLong(LSNByteOffset);
 +        dos.writeBoolean(requiresAck);
 +    }
 +
 +    public static LSMIndexFileProperties create(DataInput input) throws IOException {
 +        String nodeId = input.readUTF();
 +        String filePath = input.readUTF();
 +        long fileSize = input.readLong();
 +        boolean lsmComponentFile = input.readBoolean();
 +        long LSNByteOffset = input.readLong();
 +        boolean requiresAck = input.readBoolean();
 +        LSMIndexFileProperties fileProp = new LSMIndexFileProperties(filePath, fileSize, nodeId, lsmComponentFile,
 +                LSNByteOffset, requiresAck);
 +        return fileProp;
 +    }
 +
 +    public String getFilePath() {
 +        return filePath;
 +    }
 +
 +    public long getFileSize() {
 +        return fileSize;
 +    }
 +
 +    public String getFileName() {
 +        return fileName;
 +    }
 +
 +    public String getNodeId() {
 +        return nodeId;
 +    }
 +
 +    public String getDataverse() {
 +        return dataverse;
 +    }
 +
 +    public void setDataverse(String dataverse) {
 +        this.dataverse = dataverse;
 +    }
 +
 +    public String getIdxName() {
 +        return idxName;
 +    }
 +
 +    public boolean isLSMComponentFile() {
 +        return lsmComponentFile;
 +    }
 +
 +    public boolean requiresAck() {
 +        return requiresAck;
 +    }
 +
 +    @Override
 +    public String toString() {
 +        StringBuilder sb = new StringBuilder();
 +        sb.append("File Name: " + fileName + "  ");
 +        sb.append("File Size: " + fileSize + "  ");
 +        sb.append("Node ID: " + nodeId + "  ");
 +        sb.append("Partition: " + partition + "  ");
 +        sb.append("IDX Name: " + idxName + "  ");
 +        sb.append("isLSMComponentFile : " + lsmComponentFile + "  ");
 +        sb.append("Dataverse: " + dataverse);
 +        sb.append("LSN Byte Offset: " + LSNByteOffset);
 +        return sb.toString();
 +    }
 +
 +    public long getLSNByteOffset() {
 +        return LSNByteOffset;
 +    }
 +
 +    public int getPartition() {
 +        return partition;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-runtime/pom.xml
----------------------------------------------------------------------
diff --cc asterixdb/asterix-runtime/pom.xml
index 6414139,0000000..1914336
mode 100644,000000..100644
--- a/asterixdb/asterix-runtime/pom.xml
+++ b/asterixdb/asterix-runtime/pom.xml
@@@ -1,82 -1,0 +1,72 @@@
 +<!--
 + ! Licensed to the Apache Software Foundation (ASF) under one
 + ! or more contributor license agreements.  See the NOTICE file
 + ! distributed with this work for additional information
 + ! regarding copyright ownership.  The ASF licenses this file
 + ! to you under the Apache License, Version 2.0 (the
 + ! "License"); you may not use this file except in compliance
 + ! with the License.  You may obtain a copy of the License at
 + !
 + !   http://www.apache.org/licenses/LICENSE-2.0
 + !
 + ! Unless required by applicable law or agreed to in writing,
 + ! software distributed under the License is distributed on an
 + ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + ! KIND, either express or implied.  See the License for the
 + ! specific language governing permissions and limitations
 + ! under the License.
 + !-->
 +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 +    <modelVersion>4.0.0</modelVersion>
 +    <parent>
 +        <artifactId>apache-asterixdb</artifactId>
 +        <groupId>org.apache.asterix</groupId>
 +        <version>0.8.9-SNAPSHOT</version>
 +    </parent>
 +    <artifactId>asterix-runtime</artifactId>
 +    <properties>
 +        <appendedResourcesDirectory>${basedir}/../src/main/appended-resources</appendedResourcesDirectory>
 +    </properties>
 +    <licenses>
 +        <license>
 +            <name>Apache License, Version 2.0</name>
 +            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
 +            <distribution>repo</distribution>
 +            <comments>A business-friendly OSS license</comments>
 +        </license>
 +    </licenses>
 +    <dependencies>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-om</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-fuzzyjoin</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.hyracks</groupId>
 +            <artifactId>hyracks-storage-am-btree</artifactId>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-transactions</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
-             <groupId>org.twitter4j</groupId>
-             <artifactId>twitter4j-core</artifactId>
-             <version>[4.0,)</version>
-         </dependency>
-         <dependency>
-             <groupId>org.twitter4j</groupId>
-             <artifactId>twitter4j-stream</artifactId>
-             <version>[4.0,)</version>
-         </dependency>
-         <dependency>
 +            <groupId>org.apache.hadoop</groupId>
 +            <artifactId>hadoop-client</artifactId>
 +            <type>jar</type>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.hyracks</groupId>
 +            <artifactId>hyracks-api</artifactId>
 +        </dependency>
 +    </dependencies>
 +</project>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index 3a1e729,0000000..561b144
mode 100644,000000..100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@@ -1,467 -1,0 +1,478 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.transaction.management.resource;
 +
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;

import org.apache.asterix.common.cluster.ClusterPartition;
import org.apache.asterix.common.config.AsterixMetadataProperties;
import org.apache.asterix.common.replication.AsterixReplicationJob;
import org.apache.asterix.common.replication.IReplicationManager;
import org.apache.asterix.common.utils.StoragePathUtil;
import org.apache.commons.io.FileUtils;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.io.IODeviceHandle;
import org.apache.hyracks.api.replication.IReplicationJob.ReplicationExecutionType;
import org.apache.hyracks.api.replication.IReplicationJob.ReplicationJobType;
import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
import org.apache.hyracks.storage.common.file.LocalResource;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
 +
 +public class PersistentLocalResourceRepository implements ILocalResourceRepository {
 +
 +    private static final Logger LOGGER = Logger.getLogger(PersistentLocalResourceRepository.class.getName());
 +    private final String[] mountPoints;
 +    private static final String STORAGE_METADATA_DIRECTORY = "asterix_root_metadata";
 +    private static final String STORAGE_METADATA_FILE_NAME_PREFIX = ".asterix_root_metadata";
 +    private static final long STORAGE_LOCAL_RESOURCE_ID = -4321;
 +    public static final String METADATA_FILE_NAME = ".metadata";
 +    private final Cache<String, LocalResource> resourceCache;
 +    private final String nodeId;
 +    private static final int MAX_CACHED_RESOURCES = 1000;
 +    private IReplicationManager replicationManager;
 +    private boolean isReplicationEnabled = false;
 +    private Set<String> filesToBeReplicated;
 +    private final SortedMap<Integer, ClusterPartition> clusterPartitions;
 +    private final Set<Integer> nodeOriginalPartitions;
 +    private final Set<Integer> nodeActivePartitions;
 +    private Set<Integer> nodeInactivePartitions;
 +
 +    public PersistentLocalResourceRepository(List<IODeviceHandle> devices, String nodeId,
 +            AsterixMetadataProperties metadataProperties) throws HyracksDataException {
 +        mountPoints = new String[devices.size()];
 +        this.nodeId = nodeId;
 +        this.clusterPartitions = metadataProperties.getClusterPartitions();
 +        for (int i = 0; i < mountPoints.length; i++) {
 +            String mountPoint = devices.get(i).getPath().getPath();
 +            File mountPointDir = new File(mountPoint);
 +            if (!mountPointDir.exists()) {
 +                throw new HyracksDataException(mountPointDir.getAbsolutePath() + " doesn't exist.");
 +            }
 +            if (!mountPoint.endsWith(System.getProperty("file.separator"))) {
 +                mountPoints[i] = new String(mountPoint + System.getProperty("file.separator"));
 +            } else {
 +                mountPoints[i] = new String(mountPoint);
 +            }
 +        }
 +        resourceCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHED_RESOURCES).build();
 +
 +        ClusterPartition[] nodePartitions = metadataProperties.getNodePartitions().get(nodeId);
 +        //initially the node active partitions are the same as the original partitions
 +        nodeOriginalPartitions = new HashSet<>(nodePartitions.length);
 +        nodeActivePartitions = new HashSet<>(nodePartitions.length);
 +        for (ClusterPartition partition : nodePartitions) {
 +            nodeOriginalPartitions.add(partition.getPartitionId());
 +            nodeActivePartitions.add(partition.getPartitionId());
 +        }
 +    }
 +
 +    private static String getStorageMetadataDirPath(String mountPoint, String nodeId, int ioDeviceId) {
 +        return mountPoint + STORAGE_METADATA_DIRECTORY + File.separator + nodeId + "_" + "iodevice" + ioDeviceId;
 +    }
 +
 +    private static File getStorageMetadataBaseDir(File storageMetadataFile) {
 +        //STORAGE_METADATA_DIRECTORY / Node Id / STORAGE_METADATA_FILE_NAME_PREFIX
 +        return storageMetadataFile.getParentFile().getParentFile();
 +    }
 +
 +    public void initializeNewUniverse(String storageRootDirName) throws HyracksDataException {
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Initializing local resource repository ... ");
 +        }
 +
 +        //create storage metadata file (This file is used to locate the root storage directory after instance restarts).
 +        //TODO with the existing cluster configuration file being static and distributed on all NCs, we can find out the storage root
 +        //directory without looking at this file. This file could potentially store more information, otherwise no need to keep it.
 +        for (int i = 0; i < mountPoints.length; i++) {
 +            File storageMetadataFile = getStorageMetadataFile(mountPoints[i], nodeId, i);
 +            File storageMetadataDir = storageMetadataFile.getParentFile();
 +            //make dirs for the storage metadata file
 +            boolean success = storageMetadataDir.mkdirs();
 +            if (!success) {
 +                throw new IllegalStateException(
 +                        "Unable to create storage metadata directory of PersistentLocalResourceRepository in "
 +                                + storageMetadataDir.getAbsolutePath() + " or directory already exists");
 +            }
 +
 +            LOGGER.log(Level.INFO,
 +                    "created the root-metadata-file's directory: " + storageMetadataDir.getAbsolutePath());
 +
 +            String storageRootDirPath;
 +            if (storageRootDirName.startsWith(System.getProperty("file.separator"))) {
 +                storageRootDirPath = new String(
 +                        mountPoints[i] + storageRootDirName.substring(System.getProperty("file.separator").length()));
 +            } else {
 +                storageRootDirPath = new String(mountPoints[i] + storageRootDirName);
 +            }
 +
 +            LocalResource rootLocalResource = new LocalResource(STORAGE_LOCAL_RESOURCE_ID,
 +                    storageMetadataFile.getAbsolutePath(), 0, storageMetadataFile.getAbsolutePath(), 0,
 +                    storageRootDirPath);
 +            insert(rootLocalResource);
 +            LOGGER.log(Level.INFO, "created the root-metadata-file: " + storageMetadataFile.getAbsolutePath());
 +        }
 +        LOGGER.log(Level.INFO, "Completed the initialization of the local resource repository");
 +    }
 +
 +    @Override
 +    public LocalResource getResourceByPath(String path) throws HyracksDataException {
 +        LocalResource resource = resourceCache.getIfPresent(path);
 +        if (resource == null) {
 +            File resourceFile = getLocalResourceFileByName(path);
 +            if (resourceFile.exists()) {
 +                resource = readLocalResource(resourceFile);
 +                resourceCache.put(path, resource);
 +            }
 +        }
 +        return resource;
 +    }
 +
 +    @Override
 +    public synchronized void insert(LocalResource resource) throws HyracksDataException {
 +        File resourceFile = new File(getFileName(resource.getResourcePath(), resource.getResourceId()));
 +        if (resourceFile.exists()) {
 +            throw new HyracksDataException("Duplicate resource: " + resourceFile.getAbsolutePath());
 +        } else {
 +            resourceFile.getParentFile().mkdirs();
 +        }
 +
 +        if (resource.getResourceId() != STORAGE_LOCAL_RESOURCE_ID) {
 +            resourceCache.put(resource.getResourcePath(), resource);
 +        }
 +
 +        try (FileOutputStream fos = new FileOutputStream(resourceFile);
 +                ObjectOutputStream oosToFos = new ObjectOutputStream(fos)) {
 +            oosToFos.writeObject(resource);
 +            oosToFos.flush();
 +        } catch (IOException e) {
 +            throw new HyracksDataException(e);
 +        }
 +
 +        //if replication enabled, send resource metadata info to remote nodes
 +        if (isReplicationEnabled && resource.getResourceId() != STORAGE_LOCAL_RESOURCE_ID) {
 +            String filePath = getFileName(resource.getResourcePath(), resource.getResourceId());
 +            createReplicationJob(ReplicationOperation.REPLICATE, filePath);
 +        }
 +    }
 +
 +    @Override
 +    public synchronized void deleteResourceByPath(String resourcePath) throws HyracksDataException {
 +        File resourceFile = getLocalResourceFileByName(resourcePath);
 +        if (resourceFile.exists()) {
 +            resourceFile.delete();
 +            resourceCache.invalidate(resourcePath);
 +
 +            //if replication enabled, delete resource from remote replicas
 +            if (isReplicationEnabled && !resourceFile.getName().startsWith(STORAGE_METADATA_FILE_NAME_PREFIX)) {
 +                createReplicationJob(ReplicationOperation.DELETE, resourceFile.getAbsolutePath());
 +            }
 +        } else {
 +            throw new HyracksDataException("Resource doesn't exist");
 +        }
 +    }
 +
 +    private static File getLocalResourceFileByName(String resourcePath) {
 +        return new File(resourcePath + File.separator + METADATA_FILE_NAME);
 +    }
 +
 +    public HashMap<Long, LocalResource> loadAndGetAllResources() throws HyracksDataException {
 +        //TODO During recovery, the memory usage currently is proportional to the number of resources available.
 +        //This could be fixed by traversing all resources on disk until the required resource is found.
 +        HashMap<Long, LocalResource> resourcesMap = new HashMap<Long, LocalResource>();
 +
 +        for (int i = 0; i < mountPoints.length; i++) {
 +            File storageRootDir = getStorageRootDirectoryIfExists(mountPoints[i], nodeId, i);
 +            if (storageRootDir == null) {
 +                continue;
 +            }
 +
 +            //load all local resources.
 +            File[] partitions = storageRootDir.listFiles();
 +            for (File partition : partitions) {
 +                File[] dataverseFileList = partition.listFiles();
 +                if (dataverseFileList != null) {
 +                    for (File dataverseFile : dataverseFileList) {
 +                        if (dataverseFile.isDirectory()) {
 +                            File[] indexFileList = dataverseFile.listFiles();
 +                            if (indexFileList != null) {
 +                                for (File indexFile : indexFileList) {
 +                                    if (indexFile.isDirectory()) {
 +                                        File[] metadataFiles = indexFile.listFiles(METADATA_FILES_FILTER);
 +                                        if (metadataFiles != null) {
 +                                            for (File metadataFile : metadataFiles) {
 +                                                LocalResource localResource = readLocalResource(metadataFile);
 +                                                resourcesMap.put(localResource.getResourceId(), localResource);
 +                                            }
 +                                        }
 +                                    }
 +                                }
 +                            }
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +
 +        return resourcesMap;
 +    }
 +
 +    @Override
 +    public long getMaxResourceID() throws HyracksDataException {
 +        long maxResourceId = 0;
 +
 +        for (int i = 0; i < mountPoints.length; i++) {
 +            File storageRootDir = getStorageRootDirectoryIfExists(mountPoints[i], nodeId, i);
 +            if (storageRootDir == null) {
 +                continue;
 +            }
 +
 +            //load all local resources.
 +            File[] partitions = storageRootDir.listFiles();
 +            for (File partition : partitions) {
 +                //traverse all local resources.
 +                File[] dataverseFileList = partition.listFiles();
 +                if (dataverseFileList != null) {
 +                    for (File dataverseFile : dataverseFileList) {
 +                        if (dataverseFile.isDirectory()) {
 +                            File[] indexFileList = dataverseFile.listFiles();
 +                            if (indexFileList != null) {
 +                                for (File indexFile : indexFileList) {
 +                                    if (indexFile.isDirectory()) {
 +                                        File[] metadataFiles = indexFile.listFiles(METADATA_FILES_FILTER);
 +                                        if (metadataFiles != null) {
 +                                            for (File metadataFile : metadataFiles) {
 +                                                LocalResource localResource = readLocalResource(metadataFile);
 +                                                maxResourceId = Math.max(maxResourceId, localResource.getResourceId());
 +                                            }
 +                                        }
 +                                    }
 +                                }
 +                            }
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +
 +        return maxResourceId;
 +    }
 +
 +    private static String getFileName(String baseDir, long resourceId) {
 +        if (resourceId == STORAGE_LOCAL_RESOURCE_ID) {
 +            return baseDir;
 +        } else {
 +            if (!baseDir.endsWith(System.getProperty("file.separator"))) {
 +                baseDir += System.getProperty("file.separator");
 +            }
 +            return new String(baseDir + METADATA_FILE_NAME);
 +        }
 +    }
 +
 +    public static LocalResource readLocalResource(File file) throws HyracksDataException {
 +        try (FileInputStream fis = new FileInputStream(file);
 +                ObjectInputStream oisFromFis = new ObjectInputStream(fis)) {
 +            LocalResource resource = (LocalResource) oisFromFis.readObject();
 +            return resource;
 +        } catch (Exception e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
 +    private static final FilenameFilter METADATA_FILES_FILTER = new FilenameFilter() {
 +        @Override
 +        public boolean accept(File dir, String name) {
 +            if (name.equalsIgnoreCase(METADATA_FILE_NAME)) {
 +                return true;
 +            } else {
 +                return false;
 +            }
 +        }
 +    };
 +
 +    public void setReplicationManager(IReplicationManager replicationManager) {
 +        this.replicationManager = replicationManager;
 +        isReplicationEnabled = replicationManager.isReplicationEnabled();
 +
 +        if (isReplicationEnabled) {
 +            filesToBeReplicated = new HashSet<String>();
 +            nodeInactivePartitions = ConcurrentHashMap.newKeySet();
 +        }
 +    }
 +
 +    private void createReplicationJob(ReplicationOperation operation, String filePath) throws HyracksDataException {
-         filesToBeReplicated.clear();
-         filesToBeReplicated.add(filePath);
-         AsterixReplicationJob job = new AsterixReplicationJob(ReplicationJobType.METADATA, operation,
-                 ReplicationExecutionType.SYNC, filesToBeReplicated);
-         try {
-             replicationManager.submitJob(job);
-         } catch (IOException e) {
-             throw new HyracksDataException(e);
++        /**
++         * Durable resources path format:
++         * /partition/dataverse/idx/fileName
++         * Temporary resources path format:
++         * /partition/TEMP_DATASETS_STORAGE_FOLDER/dataverse/idx/fileName
++         */
++        String[] fileNameTokens = filePath.split(File.separator);
++        String partitionDir = fileNameTokens[fileNameTokens.length - 4];
++        //exclude temporary datasets resources
++        if (!partitionDir.equals(StoragePathUtil.TEMP_DATASETS_STORAGE_FOLDER)) {
++            filesToBeReplicated.clear();
++            filesToBeReplicated.add(filePath);
++            AsterixReplicationJob job = new AsterixReplicationJob(ReplicationJobType.METADATA, operation,
++                    ReplicationExecutionType.SYNC, filesToBeReplicated);
++            try {
++                replicationManager.submitJob(job);
++            } catch (IOException e) {
++                throw new HyracksDataException(e);
++            }
 +        }
 +    }
 +
 +    public String[] getStorageMountingPoints() {
 +        return mountPoints;
 +    }
 +
 +    /**
 +     * Deletes physical files of all data verses.
 +     *
 +     * @param deleteStorageMetadata
 +     * @throws IOException
 +     */
 +    public void deleteStorageData(boolean deleteStorageMetadata) throws IOException {
 +        for (int i = 0; i < mountPoints.length; i++) {
 +            File storageDir = getStorageRootDirectoryIfExists(mountPoints[i], nodeId, i);
 +            if (storageDir != null) {
 +                if (storageDir.isDirectory()) {
 +                    FileUtils.deleteDirectory(storageDir);
 +                }
 +            }
 +
 +            if (deleteStorageMetadata) {
 +                //delete the metadata root directory
 +                File storageMetadataFile = getStorageMetadataFile(mountPoints[i], nodeId, i);
 +                File storageMetadataDir = getStorageMetadataBaseDir(storageMetadataFile);
 +                if (storageMetadataDir.exists() && storageMetadataDir.isDirectory()) {
 +                    FileUtils.deleteDirectory(storageMetadataDir);
 +                }
 +            }
 +        }
 +    }
 +
 +    /**
 +     * @param mountPoint
 +     * @param nodeId
 +     * @param ioDeviceId
 +     * @return A file reference to the storage metadata file.
 +     */
 +    private static File getStorageMetadataFile(String mountPoint, String nodeId, int ioDeviceId) {
 +        String storageMetadataFileName = getStorageMetadataDirPath(mountPoint, nodeId, ioDeviceId) + File.separator
 +                + STORAGE_METADATA_FILE_NAME_PREFIX;
 +        File storageMetadataFile = new File(storageMetadataFileName);
 +        return storageMetadataFile;
 +    }
 +
 +    /**
 +     * @param mountPoint
 +     * @param nodeId
 +     * @param ioDeviceId
 +     * @return A file reference to the storage root directory if exists, otherwise null.
 +     * @throws HyracksDataException
 +     */
 +    public static File getStorageRootDirectoryIfExists(String mountPoint, String nodeId, int ioDeviceId)
 +            throws HyracksDataException {
 +        File storageRootDir = null;
 +        File storageMetadataFile = getStorageMetadataFile(mountPoint, nodeId, ioDeviceId);
 +        if (storageMetadataFile.exists()) {
 +            LocalResource rootLocalResource = readLocalResource(storageMetadataFile);
 +            String storageRootDirPath = (String) rootLocalResource.getResourceObject();
 +            Path path = Paths.get(storageRootDirPath);
 +            if (Files.exists(path)) {
 +                storageRootDir = new File(storageRootDirPath);
 +            }
 +        }
 +        return storageRootDir;
 +    }
 +
 +    /**
 +     * @param partition
 +     * @return The partition local path on this NC.
 +     */
 +    public String getPartitionPath(int partition) {
 +        //currently each partition is replicated on the same IO device number on all NCs.
 +        return mountPoints[getIODeviceNum(partition)];
 +    }
 +
 +    public int getIODeviceNum(int partition) {
 +        return clusterPartitions.get(partition).getIODeviceNum();
 +    }
 +
 +    public Set<Integer> getActivePartitions() {
 +        return Collections.unmodifiableSet(nodeActivePartitions);
 +    }
 +
 +    public Set<Integer> getInactivePartitions() {
 +        return Collections.unmodifiableSet(nodeInactivePartitions);
 +    }
 +
 +    public Set<Integer> getNodeOrignalPartitions() {
 +        return Collections.unmodifiableSet(nodeOriginalPartitions);
 +    }
 +
 +    public synchronized void addActivePartition(int partitonId) {
 +        nodeActivePartitions.add(partitonId);
 +        nodeInactivePartitions.remove(partitonId);
 +    }
 +
 +    public synchronized void addInactivePartition(int partitonId) {
 +        nodeInactivePartitions.add(partitonId);
 +        nodeActivePartitions.remove(partitonId);
 +    }
 +
 +    /**
 +     * @param resourceAbsolutePath
 +     * @return the resource relative path starting from the partition directory
 +     */
 +    public static String getResourceRelativePath(String resourceAbsolutePath) {
 +        String[] tokens = resourceAbsolutePath.split(File.separator);
-         //partiton/dataverse/idx/fileName
++        //partition/dataverse/idx/fileName
 +        return tokens[tokens.length - 4] + File.separator + tokens[tokens.length - 3] + File.separator
 +                + tokens[tokens.length - 2] + File.separator + tokens[tokens.length - 1];
 +    }
 +
 +    public static int getResourcePartition(String resourceAbsolutePath) {
 +        String[] tokens = resourceAbsolutePath.split(File.separator);
-         //partiton/dataverse/idx/fileName
-         return StoragePathUtil.getPartitonNumFromName(tokens[tokens.length - 4]);
++        //partition/dataverse/idx/fileName
++        return StoragePathUtil.getPartitionNumFromName(tokens[tokens.length - 4]);
 +    }
 +}


[16/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
index 8ec422f,0000000..a301ac9
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
@@@ -1,45 -1,0 +1,47 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.dataflow;
 +
 +import java.io.IOException;
 +
 +import org.apache.asterix.external.api.IRawRecord;
 +import org.apache.asterix.external.api.IRecordReader;
 +import org.apache.asterix.external.api.IRecordWithPKDataParser;
 +import org.apache.asterix.external.util.FeedLogManager;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 +
 +public class ChangeFeedDataFlowController<T> extends FeedRecordDataFlowController<T> {
 +
 +    private final IRecordWithPKDataParser<T> dataParser;
 +
 +    public ChangeFeedDataFlowController(final IHyracksTaskContext ctx, final FeedTupleForwarder tupleForwarder,
 +            final FeedLogManager feedLogManager, final int numOfOutputFields,
-             final IRecordWithPKDataParser<T> dataParser, final IRecordReader<T> recordReader) {
++            final IRecordWithPKDataParser<T> dataParser, final IRecordReader<T> recordReader)
++            throws HyracksDataException {
 +        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
 +        this.dataParser = dataParser;
 +    }
 +
 +    @Override
 +    protected void addPrimaryKeys(final ArrayTupleBuilder tb, final IRawRecord<? extends T> record) throws IOException {
 +        dataParser.appendKeys(tb, record);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
index 370eec0,0000000..aac7be2
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
@@@ -1,42 -1,0 +1,44 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.dataflow;
 +
 +import java.io.IOException;
 +
 +import org.apache.asterix.external.api.IRawRecord;
 +import org.apache.asterix.external.api.IRecordReader;
 +import org.apache.asterix.external.parser.RecordWithMetadataParser;
 +import org.apache.asterix.external.util.FeedLogManager;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 +
 +public class ChangeFeedWithMetaDataFlowController<T, O> extends FeedWithMetaDataFlowController<T, O> {
 +
 +    public ChangeFeedWithMetaDataFlowController(final IHyracksTaskContext ctx, final FeedTupleForwarder tupleForwarder,
 +            final FeedLogManager feedLogManager, final int numOfOutputFields,
-             final RecordWithMetadataParser<T, O> dataParser, final IRecordReader<T> recordReader) {
++            final RecordWithMetadataParser<T, O> dataParser, final IRecordReader<T> recordReader)
++            throws HyracksDataException {
 +        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
 +    }
 +
 +    @Override
 +    protected void addPrimaryKeys(final ArrayTupleBuilder tb, final IRawRecord<? extends T> record) throws IOException {
 +        dataParser.appendPK(tb);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
index 6401234,0000000..a092620
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
@@@ -1,169 -1,0 +1,172 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.dataflow;
 +
 +import java.io.IOException;
 +import java.util.concurrent.atomic.AtomicBoolean;
 +
 +import javax.annotation.Nonnull;
 +
 +import org.apache.asterix.external.api.IRawRecord;
 +import org.apache.asterix.external.api.IRecordDataParser;
 +import org.apache.asterix.external.api.IRecordReader;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataExceptionUtils;
 +import org.apache.asterix.external.util.FeedLogManager;
 +import org.apache.hyracks.api.comm.IFrameWriter;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 +import org.apache.log4j.Logger;
 +
 +public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowController {
 +    private static final Logger LOGGER = Logger.getLogger(FeedRecordDataFlowController.class.getName());
 +    protected final IRecordDataParser<T> dataParser;
 +    protected final IRecordReader<? extends T> recordReader;
 +    protected final AtomicBoolean closed = new AtomicBoolean(false);
 +    protected final long interval = 1000;
 +    protected boolean failed = false;
 +
 +    public FeedRecordDataFlowController(IHyracksTaskContext ctx, FeedTupleForwarder tupleForwarder,
 +            @Nonnull FeedLogManager feedLogManager, int numOfOutputFields, @Nonnull IRecordDataParser<T> dataParser,
-             @Nonnull IRecordReader<T> recordReader) {
++            @Nonnull IRecordReader<T> recordReader) throws HyracksDataException {
 +        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields);
 +        this.dataParser = dataParser;
 +        this.recordReader = recordReader;
 +        recordReader.setFeedLogManager(feedLogManager);
 +        recordReader.setController(this);
 +    }
 +
 +    @Override
 +    public void start(IFrameWriter writer) throws HyracksDataException {
 +        HyracksDataException hde = null;
 +        try {
 +            failed = false;
 +            tupleForwarder.initialize(ctx, writer);
 +            while (recordReader.hasNext()) {
 +                IRawRecord<? extends T> record = recordReader.next();
 +                if (record == null) {
 +                    flush();
 +                    Thread.sleep(interval);
 +                    continue;
 +                }
 +                tb.reset();
 +                try {
 +                    dataParser.parse(record, tb.getDataOutput());
 +                } catch (Exception e) {
 +                    e.printStackTrace();
 +                    LOGGER.warn(ExternalDataConstants.ERROR_PARSE_RECORD, e);
 +                    feedLogManager.logRecord(record.toString(), ExternalDataConstants.ERROR_PARSE_RECORD);
 +                    continue;
 +                }
 +                tb.addFieldEndOffset();
 +                addMetaPart(tb, record);
 +                addPrimaryKeys(tb, record);
 +                if (tb.getSize() > tupleForwarder.getMaxRecordSize()) {
 +                    // log
 +                    feedLogManager.logRecord(record.toString(), ExternalDataConstants.ERROR_LARGE_RECORD);
 +                    continue;
 +                }
 +                tupleForwarder.addTuple(tb);
 +            }
++        } catch (InterruptedException e) {
++            //TODO: Find out what could cause an interrupted exception beside termination of a job/feed
++            LOGGER.warn("Feed has been interrupted. Closing the feed");
 +        } catch (Exception e) {
 +            failed = true;
 +            tupleForwarder.flush();
 +            LOGGER.warn("Failure while operating a feed source", e);
 +            throw new HyracksDataException(e);
 +        }
 +        try {
 +            tupleForwarder.close();
 +        } catch (Throwable th) {
 +            hde = ExternalDataExceptionUtils.suppress(hde, th);
 +        }
 +        try {
 +            recordReader.close();
 +        } catch (Throwable th) {
 +            LOGGER.warn("Failure during while operating a feed sourcec", th);
 +            hde = ExternalDataExceptionUtils.suppress(hde, th);
 +        } finally {
 +            closeSignal();
 +            if (hde != null) {
 +                throw hde;
 +            }
 +        }
 +    }
 +
 +    protected void addMetaPart(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
 +    }
 +
 +    protected void addPrimaryKeys(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
 +    }
 +
 +    private void closeSignal() {
 +        synchronized (closed) {
 +            closed.set(true);
 +            closed.notifyAll();
 +        }
 +    }
 +
 +    private void waitForSignal() throws InterruptedException {
 +        synchronized (closed) {
 +            while (!closed.get()) {
 +                closed.wait();
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public boolean stop() throws HyracksDataException {
 +        HyracksDataException hde = null;
 +        if (recordReader.stop()) {
 +            if (failed) {
 +                // failed, close here
 +                try {
 +                    tupleForwarder.close();
 +                } catch (Throwable th) {
 +                    hde = ExternalDataExceptionUtils.suppress(hde, th);
 +                }
 +                try {
 +                    recordReader.close();
 +                } catch (Throwable th) {
 +                    hde = ExternalDataExceptionUtils.suppress(hde, th);
 +                }
 +                if (hde != null) {
 +                    throw hde;
 +                }
 +            } else {
 +                try {
 +                    waitForSignal();
 +                } catch (InterruptedException e) {
 +                    throw new HyracksDataException(e);
 +                }
 +            }
 +            return true;
 +        }
 +        return false;
 +    }
 +
 +    @Override
 +    public boolean handleException(Throwable th) {
 +        // This is not a parser record. most likely, this error happened in the record reader.
 +        return recordReader.handleException(th);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java
index 203b5a7,0000000..e7c396b
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedWithMetaDataFlowController.java
@@@ -1,47 -1,0 +1,48 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.dataflow;
 +
 +import java.io.IOException;
 +
 +import org.apache.asterix.external.api.IRawRecord;
 +import org.apache.asterix.external.api.IRecordReader;
 +import org.apache.asterix.external.parser.RecordWithMetadataParser;
 +import org.apache.asterix.external.util.FeedLogManager;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 +
 +public class FeedWithMetaDataFlowController<T, O> extends FeedRecordDataFlowController<T> {
 +
 +    //This field mask a super class field dataParser. We do this to avoid down-casting when calling parseMeta
 +    protected RecordWithMetadataParser<T, O> dataParser;
 +
 +    public FeedWithMetaDataFlowController(IHyracksTaskContext ctx, FeedTupleForwarder tupleForwarder,
 +            FeedLogManager feedLogManager, int numOfOutputFields, RecordWithMetadataParser<T, O> dataParser,
-             IRecordReader<T> recordReader) {
++            IRecordReader<T> recordReader) throws HyracksDataException {
 +        super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
 +        this.dataParser = dataParser;
 +    }
 +
 +    @Override
 +    protected void addMetaPart(ArrayTupleBuilder tb, IRawRecord<? extends T> record) throws IOException {
 +        dataParser.parseMeta(tb.getDataOutput());
 +        tb.addFieldEndOffset();
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
index de185e0,0000000..529977a
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/HDFSDataSourceFactory.java
@@@ -1,201 -1,0 +1,229 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input;
 +
 +import java.io.IOException;
 +import java.util.Arrays;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.AsterixInputStream;
++import org.apache.asterix.external.api.IExternalIndexer;
 +import org.apache.asterix.external.api.IIndexibleExternalDataSource;
- import org.apache.asterix.external.api.IInputStreamFactory;
 +import org.apache.asterix.external.api.IRecordReader;
 +import org.apache.asterix.external.api.IRecordReaderFactory;
 +import org.apache.asterix.external.indexing.ExternalFile;
 +import org.apache.asterix.external.indexing.IndexingScheduler;
++import org.apache.asterix.external.input.record.reader.IndexingStreamRecordReader;
 +import org.apache.asterix.external.input.record.reader.hdfs.HDFSRecordReader;
++import org.apache.asterix.external.input.record.reader.stream.StreamRecordReader;
 +import org.apache.asterix.external.input.stream.HDFSInputStream;
 +import org.apache.asterix.external.provider.ExternalIndexerProvider;
++import org.apache.asterix.external.provider.StreamRecordReaderProvider;
++import org.apache.asterix.external.provider.StreamRecordReaderProvider.Format;
++import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataUtils;
 +import org.apache.asterix.external.util.HDFSUtils;
 +import org.apache.hadoop.io.Writable;
 +import org.apache.hadoop.mapred.InputSplit;
 +import org.apache.hadoop.mapred.JobConf;
 +import org.apache.hadoop.mapred.RecordReader;
 +import org.apache.hadoop.mapred.Reporter;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.hdfs.dataflow.ConfFactory;
 +import org.apache.hyracks.hdfs.dataflow.InputSplitsFactory;
 +import org.apache.hyracks.hdfs.scheduler.Scheduler;
 +
- public class HDFSDataSourceFactory
-         implements IInputStreamFactory, IRecordReaderFactory<Object>, IIndexibleExternalDataSource {
++public class HDFSDataSourceFactory implements IRecordReaderFactory<Object>, IIndexibleExternalDataSource {
 +
 +    protected static final long serialVersionUID = 1L;
 +    protected transient AlgebricksAbsolutePartitionConstraint clusterLocations;
 +    protected String[] readSchedule;
 +    protected boolean read[];
 +    protected InputSplitsFactory inputSplitsFactory;
 +    protected ConfFactory confFactory;
 +    protected boolean configured = false;
 +    protected static Scheduler hdfsScheduler;
 +    protected static IndexingScheduler indexingScheduler;
 +    protected static Boolean initialized = false;
 +    protected static Object initLock = new Object();
 +    protected List<ExternalFile> files;
 +    protected Map<String, String> configuration;
 +    protected Class<?> recordClass;
 +    protected boolean indexingOp = false;
 +    private JobConf conf;
 +    private InputSplit[] inputSplits;
 +    private String nodeName;
++    private Format format;
 +
 +    @Override
 +    public void configure(Map<String, String> configuration) throws AsterixException {
 +        try {
 +            init();
 +            this.configuration = configuration;
 +            JobConf conf = HDFSUtils.configureHDFSJobConf(configuration);
 +            confFactory = new ConfFactory(conf);
 +            clusterLocations = getPartitionConstraint();
 +            int numPartitions = clusterLocations.getLocations().length;
 +            // if files list was set, we restrict the splits to the list
 +            InputSplit[] inputSplits;
 +            if (files == null) {
 +                inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
 +            } else {
 +                inputSplits = HDFSUtils.getSplits(conf, files);
 +            }
 +            if (indexingOp) {
 +                readSchedule = indexingScheduler.getLocationConstraints(inputSplits);
 +            } else {
 +                readSchedule = hdfsScheduler.getLocationConstraints(inputSplits);
 +            }
 +            inputSplitsFactory = new InputSplitsFactory(inputSplits);
 +            read = new boolean[readSchedule.length];
 +            Arrays.fill(read, false);
-             if (!ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.STREAM)) {
++            String formatString = configuration.get(ExternalDataConstants.KEY_FORMAT);
++            if (formatString == null || formatString.equals(ExternalDataConstants.FORMAT_HDFS_WRITABLE)) {
 +                RecordReader<?, ?> reader = conf.getInputFormat().getRecordReader(inputSplits[0], conf, Reporter.NULL);
 +                this.recordClass = reader.createValue().getClass();
 +                reader.close();
++            } else {
++                format = StreamRecordReaderProvider.getReaderFormat(configuration);
++                this.recordClass = char[].class;
 +            }
 +        } catch (IOException e) {
 +            throw new AsterixException(e);
 +        }
 +    }
 +
 +    // Used to tell the factory to restrict the splits to the intersection between this list a
 +    // actual files on hde
 +    @Override
 +    public void setSnapshot(List<ExternalFile> files, boolean indexingOp) {
 +        this.files = files;
 +        this.indexingOp = indexingOp;
 +    }
 +
 +    /*
 +     * The method below was modified to take care of the following
 +     * 1. when target files are not null, it generates a file aware input stream that validate
 +     * against the files
 +     * 2. if the data is binary, it returns a generic reader */
-     @Override
-     public AsterixInputStream createInputStream(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
++    public AsterixInputStream createInputStream(IHyracksTaskContext ctx, int partition, IExternalIndexer indexer)
++            throws HyracksDataException {
 +        try {
 +            if (!configured) {
 +                conf = confFactory.getConf();
 +                inputSplits = inputSplitsFactory.getSplits();
 +                nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
 +                configured = true;
 +            }
-             return new HDFSInputStream(read, inputSplits, readSchedule, nodeName, conf, configuration, files);
++            return new HDFSInputStream(read, inputSplits, readSchedule, nodeName, conf, configuration, files, indexer);
 +        } catch (Exception e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
 +    /**
 +     * Get the cluster locations for this input stream factory. This method specifies on which asterix nodes the
 +     * external
 +     * adapter will run and how many threads per node.
 +     *
 +     * @return
 +     */
 +    @Override
 +    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
 +        clusterLocations = HDFSUtils.getPartitionConstraints(clusterLocations);
 +        return clusterLocations;
 +    }
 +
 +    /**
 +     * This method initialize the scheduler which assigns responsibility of reading different logical input splits from
 +     * HDFS
 +     */
 +    private static void init() {
 +        if (!initialized) {
 +            synchronized (initLock) {
 +                if (!initialized) {
 +                    hdfsScheduler = HDFSUtils.initializeHDFSScheduler();
 +                    indexingScheduler = HDFSUtils.initializeIndexingHDFSScheduler();
 +                    initialized = true;
 +                }
 +            }
 +        }
 +    }
 +
 +    public JobConf getJobConf() throws HyracksDataException {
 +        return confFactory.getConf();
 +    }
 +
 +    @Override
 +    public DataSourceType getDataSourceType() {
 +        return ExternalDataUtils.getDataSourceType(configuration);
 +    }
 +
++    /**
++     * HDFS Datasource is a special case in two ways:
++     * 1. It supports indexing.
++     * 2. It returns input as a set of writable object that we sometimes internally transform into a byte stream
++     * Hence, it can produce:
++     * 1. StreamRecordReader: When we transform the input into a byte stream.
++     * 2. Indexing Stream Record Reader: When we transform the input into a byte stream and perform indexing.
++     * 3. HDFS Record Reader: When we simply pass the Writable object as it is to the parser.
++     */
 +    @Override
-     public IRecordReader<? extends Writable> createRecordReader(IHyracksTaskContext ctx, int partition)
++    public IRecordReader<? extends Object> createRecordReader(IHyracksTaskContext ctx, int partition)
 +            throws HyracksDataException {
 +        try {
++            IExternalIndexer indexer = files == null ? null : ExternalIndexerProvider.getIndexer(configuration);
++            if (format != null) {
++                StreamRecordReader streamReader = StreamRecordReaderProvider.createRecordReader(format,
++                        createInputStream(ctx, partition, indexer), configuration);
++                if (indexer != null) {
++                    return new IndexingStreamRecordReader(streamReader, indexer);
++                } else {
++                    return streamReader;
++                }
++            }
 +            JobConf conf = confFactory.getConf();
 +            InputSplit[] inputSplits = inputSplitsFactory.getSplits();
 +            String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
 +            return new HDFSRecordReader<Object, Writable>(read, inputSplits, readSchedule, nodeName, conf, files,
-                     files == null ? null : ExternalIndexerProvider.getIndexer(configuration));
++                    indexer);
 +        } catch (Exception e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
 +    @Override
 +    public Class<?> getRecordClass() {
 +        return recordClass;
 +    }
 +
 +    @Override
 +    public boolean isIndexible() {
 +        return true;
 +    }
 +
 +    @Override
 +    public boolean isIndexingOp() {
 +        return ((files != null) && indexingOp);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
index 6964a82,0000000..aa0451a
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
@@@ -1,122 -1,0 +1,121 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.record.reader.stream;
 +
 +import java.io.IOException;
 +
 +import org.apache.asterix.external.api.AsterixInputStream;
- import org.apache.asterix.external.api.IExternalIndexer;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +
- public class EmptyLineSeparatedRecordReader extends AbstractStreamRecordReader {
++public class EmptyLineSeparatedRecordReader extends StreamRecordReader {
 +
-     public EmptyLineSeparatedRecordReader(AsterixInputStream inputStream, IExternalIndexer indexer) {
-         super(inputStream, indexer);
++    public EmptyLineSeparatedRecordReader(AsterixInputStream inputStream) {
++        super(inputStream);
 +    }
 +
 +    private boolean prevCharCR;
 +    private boolean prevCharLF;
 +    private int newlineLength;
 +    private int readLength;
 +
 +    @Override
 +    public boolean hasNext() throws IOException {
 +        if (done) {
 +            return false;
 +        }
 +        if (!skipWhiteSpace()) {
 +            done = true;
 +            close();
 +            return false;
 +        }
 +        newlineLength = 0;
 +        prevCharCR = false;
 +        prevCharLF = false;
 +        record.reset();
 +        readLength = 0;
 +        do {
 +            int startPosn = bufferPosn; //starting from where we left off the last time
 +            if (bufferPosn >= bufferLength) {
 +                startPosn = bufferPosn = 0;
 +                bufferLength = reader.read(inputBuffer);
 +                if (bufferLength <= 0) {
 +                    if (readLength > 0) {
 +                        record.endRecord();
 +                        return true;
 +                    }
 +                    close();
 +                    return false; //EOF
 +                }
 +            }
 +            for (; bufferPosn < bufferLength; ++bufferPosn) { //search for two consecutive newlines
 +                if (inputBuffer[bufferPosn] == ExternalDataConstants.LF) {
 +                    if (prevCharLF) {
 +                        // \n\n
 +                        ++bufferPosn; // at next invocation proceed from following byte
 +                        newlineLength = 2;
 +                        break;
 +                    } else if (prevCharCR) {
 +                        newlineLength += 1;
 +                    }
 +                    prevCharLF = true;
 +                } else {
 +                    prevCharLF = false;
 +                }
 +                if (inputBuffer[bufferPosn] == ExternalDataConstants.CR) { //CR + notLF, we are at notLF
 +                    if (prevCharCR) {
 +                        // \cr\cr
 +                        newlineLength = 2;
 +                        break;
 +                    }
 +                    prevCharCR = true;
 +                } else {
 +                    prevCharCR = false;
 +                }
 +                if (!(prevCharCR || prevCharLF)) {
 +                    newlineLength = 0;
 +                }
 +            }
 +            readLength = bufferPosn - startPosn;
 +            if (readLength > 0) {
 +                record.append(inputBuffer, startPosn, readLength);
 +            }
 +        } while (newlineLength < 2);
 +        record.endRecord();
 +        return true;
 +    }
 +
 +    private boolean skipWhiteSpace() throws IOException {
 +        // start by skipping white spaces
 +        while (true) {
 +            if (bufferPosn < bufferLength) {
 +                if (!Character.isWhitespace(inputBuffer[bufferPosn])) {
 +                    return true;
 +                }
 +                bufferPosn++;
 +            } else {
 +                // fill buffer
 +                bufferPosn = 0;
 +                bufferLength = reader.read(inputBuffer);
 +                if (bufferLength < 0) {
 +                    return false;
 +                }
 +            }
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
index 3089295,0000000..59b72e4
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
@@@ -1,114 -1,0 +1,121 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.record.reader.stream;
 +
 +import java.io.IOException;
 +
 +import org.apache.asterix.external.api.AsterixInputStream;
- import org.apache.asterix.external.api.IExternalIndexer;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
- public class LineRecordReader extends AbstractStreamRecordReader {
++public class LineRecordReader extends StreamRecordReader {
 +
++    private final boolean hasHeader;
 +    protected boolean prevCharCR;
 +    protected int newlineLength;
 +    protected int recordNumber = 0;
++    protected boolean nextIsHeader = false;
 +
-     public LineRecordReader(final boolean hasHeader, final AsterixInputStream stream, final IExternalIndexer indexer)
-             throws HyracksDataException {
-         super(stream, indexer);
-         try {
-             if (hasHeader) {
-                 if (hasNext()) {
-                     next();
-                 }
-             }
-         } catch (final IOException e) {
-             throw new HyracksDataException(e);
++    public LineRecordReader(final boolean hasHeader, final AsterixInputStream stream) throws HyracksDataException {
++        super(stream);
++        this.hasHeader = hasHeader;
++        if (hasHeader) {
++            stream.setNotificationHandler(this);
 +        }
++    }
 +
++    @Override
++    public void notifyNewSource() {
++        if (hasHeader) {
++            nextIsHeader = true;
++        }
 +    }
 +
 +    @Override
 +    public boolean hasNext() throws IOException {
-         if (done) {
-             return false;
-         }
-         /*
-          * We're reading data from in, but the head of the stream may be
-          * already buffered in buffer, so we have several cases:
-          * 1. No newline characters are in the buffer, so we need to copy
-          *   everything and read another buffer from the stream.
-          * 2. An unambiguously terminated line is in buffer, so we just
-          *    copy to record.
-          * 3. Ambiguously terminated line is in buffer, i.e. buffer ends
-          *    in CR. In this case we copy everything up to CR to record, but
-          * we also need to see what follows CR: if it's LF, then we
-          * need consume LF as well, so next call to readLine will read
-          * from after that.
-          * We use a flag prevCharCR to signal if previous character was CR
-          * and, if it happens to be at the end of the buffer, delay
-          * consuming it until we have a chance to look at the char that
-          * follows.
-          */
-         newlineLength = 0; //length of terminating newline
-         prevCharCR = false; //true of prev char was CR
-         record.reset();
-         int readLength = 0;
-         do {
-             int startPosn = bufferPosn; //starting from where we left off the last time
-             if (bufferPosn >= bufferLength) {
-                 startPosn = bufferPosn = 0;
-                 bufferLength = reader.read(inputBuffer);
-                 if (bufferLength <= 0) {
-                     if (readLength > 0) {
-                         record.endRecord();
-                         recordNumber++;
-                         return true;
++        while (true) {
++            if (done) {
++                return false;
++            }
++            /*
++             * We're reading data from in, but the head of the stream may be
++             * already buffered in buffer, so we have several cases:
++             * 1. No newline characters are in the buffer, so we need to copy
++             *   everything and read another buffer from the stream.
++             * 2. An unambiguously terminated line is in buffer, so we just
++             *    copy to record.
++             * 3. Ambiguously terminated line is in buffer, i.e. buffer ends
++             *    in CR. In this case we copy everything up to CR to record, but
++             * we also need to see what follows CR: if it's LF, then we
++             * need to consume LF as well, so next call to readLine will read
++             * from after that.
++             * We use a flag prevCharCR to signal if previous character was CR
++             * and, if it happens to be at the end of the buffer, delay
++             * consuming it until we have a chance to look at the char that
++             * follows.
++             */
++            newlineLength = 0; //length of terminating newline
++            prevCharCR = false; //true if prev char was CR
++            record.reset();
++            int readLength = 0;
++            do {
++                int startPosn = bufferPosn; //starting from where we left off the last time
++                if (bufferPosn >= bufferLength) {
++                    startPosn = bufferPosn = 0;
++                    bufferLength = reader.read(inputBuffer);
++                    if (bufferLength <= 0) {
++                        if (readLength > 0) {
++                            record.endRecord();
++                            recordNumber++;
++                            return true;
++                        }
++                        close();
++                        return false; //EOF
 +                    }
-                     close();
-                     return false; //EOF
 +                }
-             }
-             for (; bufferPosn < bufferLength; ++bufferPosn) { //search for newline
-                 if (inputBuffer[bufferPosn] == ExternalDataConstants.LF) {
-                     newlineLength = (prevCharCR) ? 2 : 1;
-                     ++bufferPosn; // at next invocation proceed from following byte
-                     break;
++                for (; bufferPosn < bufferLength; ++bufferPosn) { //search for newline
++                    if (inputBuffer[bufferPosn] == ExternalDataConstants.LF) {
++                        newlineLength = (prevCharCR) ? 2 : 1;
++                        ++bufferPosn; // at next invocation proceed from following byte
++                        break;
++                    }
++                    if (prevCharCR) { //CR + notLF, we are at notLF
++                        newlineLength = 1;
++                        break;
++                    }
++                    prevCharCR = (inputBuffer[bufferPosn] == ExternalDataConstants.CR);
 +                }
-                 if (prevCharCR) { //CR + notLF, we are at notLF
-                     newlineLength = 1;
-                     break;
++                readLength = bufferPosn - startPosn;
++                if (prevCharCR && newlineLength == 0) {
++                    --readLength; //CR at the end of the buffer
++                    prevCharCR = false;
 +                }
-                 prevCharCR = (inputBuffer[bufferPosn] == ExternalDataConstants.CR);
-             }
-             readLength = bufferPosn - startPosn;
-             if (prevCharCR && newlineLength == 0) {
-                 --readLength; //CR at the end of the buffer
-                 prevCharCR = false;
-             }
-             if (readLength > 0) {
-                 record.append(inputBuffer, startPosn, readLength);
++                if (readLength > 0) {
++                    record.append(inputBuffer, startPosn, readLength);
++                }
++            } while (newlineLength == 0);
++            if (nextIsHeader) {
++                nextIsHeader = false;
++                continue;
 +            }
-         } while (newlineLength == 0);
-         recordNumber++;
-         return true;
++            recordNumber++;
++            return true;
++        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
index abd2952,0000000..88964a1
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/QuotedLineRecordReader.java
@@@ -1,119 -1,0 +1,125 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.record.reader.stream;
 +
 +import java.io.IOException;
 +
 +import org.apache.asterix.external.api.AsterixInputStream;
 +import org.apache.asterix.external.api.IExternalIndexer;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataExceptionUtils;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
 +public class QuotedLineRecordReader extends LineRecordReader {
 +
 +    private final char quote;
 +    private boolean prevCharEscape;
 +    private boolean inQuote;
 +
-     public QuotedLineRecordReader(final boolean hasHeader, final AsterixInputStream stream,
-             final IExternalIndexer indexer, final String quoteString) throws HyracksDataException {
-         super(hasHeader, stream, indexer);
++    public QuotedLineRecordReader(final boolean hasHeader, final AsterixInputStream stream, final String quoteString)
++            throws HyracksDataException {
++        super(hasHeader, stream);
 +        if ((quoteString == null) || (quoteString.length() != 1)) {
 +            throw new HyracksDataException(ExternalDataExceptionUtils.incorrectParameterMessage(
 +                    ExternalDataConstants.KEY_QUOTE, ExternalDataConstants.PARAMETER_OF_SIZE_ONE, quoteString));
 +        }
 +        this.quote = quoteString.charAt(0);
 +    }
 +
 +    @Override
 +    public boolean hasNext() throws IOException {
-         if (done) {
-             return false;
-         }
-         newlineLength = 0;
-         prevCharCR = false;
-         prevCharEscape = false;
-         record.reset();
-         int readLength = 0;
-         inQuote = false;
-         do {
-             int startPosn = bufferPosn;
-             if (bufferPosn >= bufferLength) {
-                 startPosn = bufferPosn = 0;
-                 bufferLength = reader.read(inputBuffer);
-                 if (bufferLength <= 0) {
-                     {
-                         if (readLength > 0) {
-                             if (inQuote) {
-                                 throw new IOException("malformed input record ended inside quote");
++        while (true) {
++            if (done) {
++                return false;
++            }
++            newlineLength = 0;
++            prevCharCR = false;
++            prevCharEscape = false;
++            record.reset();
++            int readLength = 0;
++            inQuote = false;
++            do {
++                int startPosn = bufferPosn;
++                if (bufferPosn >= bufferLength) {
++                    startPosn = bufferPosn = 0;
++                    bufferLength = reader.read(inputBuffer);
++                    if (bufferLength <= 0) {
++                        {
++                            if (readLength > 0) {
++                                if (inQuote) {
++                                    throw new IOException("malformed input record ended inside quote");
++                                }
++                                record.endRecord();
++                                recordNumber++;
++                                return true;
 +                            }
-                             record.endRecord();
-                             recordNumber++;
-                             return true;
++                            close();
++                            return false;
 +                        }
-                         close();
-                         return false;
 +                    }
 +                }
-             }
-             for (; bufferPosn < bufferLength; ++bufferPosn) {
-                 if (!inQuote) {
-                     if (inputBuffer[bufferPosn] == ExternalDataConstants.LF) {
-                         newlineLength = (prevCharCR) ? 2 : 1;
-                         ++bufferPosn;
-                         break;
-                     }
-                     if (prevCharCR) {
-                         newlineLength = 1;
-                         break;
-                     }
-                     prevCharCR = (inputBuffer[bufferPosn] == ExternalDataConstants.CR);
-                     if (inputBuffer[bufferPosn] == quote) {
-                         if (!prevCharEscape) {
-                             inQuote = true;
++                for (; bufferPosn < bufferLength; ++bufferPosn) {
++                    if (!inQuote) {
++                        if (inputBuffer[bufferPosn] == ExternalDataConstants.LF) {
++                            newlineLength = (prevCharCR) ? 2 : 1;
++                            ++bufferPosn;
++                            break;
++                        }
++                        if (prevCharCR) {
++                            newlineLength = 1;
++                            break;
++                        }
++                        prevCharCR = (inputBuffer[bufferPosn] == ExternalDataConstants.CR);
++                        if (inputBuffer[bufferPosn] == quote) {
++                            if (!prevCharEscape) {
++                                inQuote = true;
++                            }
++                        }
++                        if (prevCharEscape) {
++                            prevCharEscape = false;
++                        } else {
++                            prevCharEscape = inputBuffer[bufferPosn] == ExternalDataConstants.ESCAPE;
 +                        }
-                     }
-                     if (prevCharEscape) {
-                         prevCharEscape = false;
 +                    } else {
-                         prevCharEscape = inputBuffer[bufferPosn] == ExternalDataConstants.ESCAPE;
-                     }
-                 } else {
-                     // only look for next quote
-                     if (inputBuffer[bufferPosn] == quote) {
-                         if (!prevCharEscape) {
-                             inQuote = false;
++                        // only look for next quote
++                        if (inputBuffer[bufferPosn] == quote) {
++                            if (!prevCharEscape) {
++                                inQuote = false;
++                            }
 +                        }
++                        prevCharEscape = inputBuffer[bufferPosn] == ExternalDataConstants.ESCAPE;
 +                    }
-                     prevCharEscape = inputBuffer[bufferPosn] == ExternalDataConstants.ESCAPE;
 +                }
++                readLength = bufferPosn - startPosn;
++                if (prevCharCR && newlineLength == 0) {
++                    --readLength;
++                }
++                if (readLength > 0) {
++                    record.append(inputBuffer, startPosn, readLength);
++                }
++            } while (newlineLength == 0);
++            if (nextIsHeader) {
++                nextIsHeader = false;
++                continue;
 +            }
-             readLength = bufferPosn - startPosn;
-             if (prevCharCR && newlineLength == 0) {
-                 --readLength;
-             }
-             if (readLength > 0) {
-                 record.append(inputBuffer, startPosn, readLength);
-             }
-         } while (newlineLength == 0);
-         recordNumber++;
-         return true;
++            recordNumber++;
++            return true;
++        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
index 7339bfd,0000000..26ac3cb
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
@@@ -1,164 -1,0 +1,164 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.record.reader.stream;
 +
 +import java.io.IOException;
 +
- import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.AsterixInputStream;
 +import org.apache.asterix.external.api.IExternalIndexer;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataExceptionUtils;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
- public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
++public class SemiStructuredRecordReader extends StreamRecordReader {
 +
 +    private int depth;
 +    private boolean prevCharEscape;
 +    private boolean inString;
 +    private char recordStart;
 +    private char recordEnd;
 +    private int recordNumber = 0;
 +
-     public SemiStructuredRecordReader(AsterixInputStream stream, IExternalIndexer indexer, String recStartString,
-             String recEndString) throws AsterixException {
-         super(stream, indexer);
++    public SemiStructuredRecordReader(AsterixInputStream stream, String recStartString, String recEndString)
++            throws HyracksDataException {
++        super(stream);
 +        // set record opening char
 +        if (recStartString != null) {
 +            if (recStartString.length() != 1) {
-                 throw new AsterixException(
++                throw new HyracksDataException(
 +                        ExternalDataExceptionUtils.incorrectParameterMessage(ExternalDataConstants.KEY_RECORD_START,
 +                                ExternalDataConstants.PARAMETER_OF_SIZE_ONE, recStartString));
 +            }
 +            recordStart = recStartString.charAt(0);
 +        } else {
 +            recordStart = ExternalDataConstants.DEFAULT_RECORD_START;
 +        }
 +        // set record ending char
 +        if (recEndString != null) {
 +            if (recEndString.length() != 1) {
-                 throw new AsterixException(
++                throw new HyracksDataException(
 +                        ExternalDataExceptionUtils.incorrectParameterMessage(ExternalDataConstants.KEY_RECORD_END,
 +                                ExternalDataConstants.PARAMETER_OF_SIZE_ONE, recEndString));
 +            }
 +            recordEnd = recEndString.charAt(0);
 +        } else {
 +            recordEnd = ExternalDataConstants.DEFAULT_RECORD_END;
 +        }
 +    }
 +
 +    public int getRecordNumber() {
 +        return recordNumber;
 +    }
 +
 +    @Override
-     public boolean hasNext() throws Exception {
++    public boolean hasNext() throws IOException {
 +        if (done) {
 +            return false;
 +        }
 +        record.reset();
 +        boolean hasStarted = false;
 +        boolean hasFinished = false;
 +        prevCharEscape = false;
 +        inString = false;
 +        depth = 0;
 +        do {
 +            int startPosn = bufferPosn; // starting from where we left off the last time
 +            if (bufferPosn >= bufferLength) {
 +                startPosn = bufferPosn = 0;
 +                bufferLength = reader.read(inputBuffer);
 +                if (bufferLength < 0) {
 +                    close();
 +                    return false; // EOF
 +                }
 +            }
 +            if (!hasStarted) {
 +                for (; bufferPosn < bufferLength; ++bufferPosn) { // search for record begin
 +                    if (inputBuffer[bufferPosn] == recordStart) {
 +                        startPosn = bufferPosn;
 +                        hasStarted = true;
 +                        depth = 1;
 +                        ++bufferPosn; // at next invocation proceed from following byte
 +                        break;
 +                    } else if (inputBuffer[bufferPosn] != ExternalDataConstants.SPACE
 +                            && inputBuffer[bufferPosn] != ExternalDataConstants.TAB
 +                            && inputBuffer[bufferPosn] != ExternalDataConstants.LF
 +                            && inputBuffer[bufferPosn] != ExternalDataConstants.CR) {
 +                        // corrupted file. clear the buffer and stop reading
 +                        reader.reset();
 +                        bufferPosn = bufferLength = 0;
 +                        throw new IOException("Malformed input stream");
 +                    }
 +                }
 +            }
 +            if (hasStarted) {
 +                for (; bufferPosn < bufferLength; ++bufferPosn) { // search for record end
 +                    if (inString) {
 +                        // we are in a string, we only care about the string end
 +                        if (inputBuffer[bufferPosn] == ExternalDataConstants.QUOTE && !prevCharEscape) {
 +                            inString = false;
 +                        }
 +                        if (prevCharEscape) {
 +                            prevCharEscape = false;
 +                        } else {
 +                            prevCharEscape = inputBuffer[bufferPosn] == ExternalDataConstants.ESCAPE;
 +                        }
 +                    } else {
 +                        if (inputBuffer[bufferPosn] == ExternalDataConstants.QUOTE) {
 +                            inString = true;
 +                        } else if (inputBuffer[bufferPosn] == recordStart) {
 +                            depth += 1;
 +                        } else if (inputBuffer[bufferPosn] == recordEnd) {
 +                            depth -= 1;
 +                            if (depth == 0) {
 +                                hasFinished = true;
 +                                ++bufferPosn; // at next invocation proceed from following byte
 +                                break;
 +                            }
 +                        }
 +                    }
 +                }
 +            }
 +
 +            int appendLength = bufferPosn - startPosn;
 +            if (appendLength > 0) {
 +                try {
 +                    record.append(inputBuffer, startPosn, appendLength);
 +                } catch (IOException e) {
 +                    reader.reset();
 +                    bufferPosn = bufferLength = 0;
 +                    throw new IOException("Malformed input stream");
 +                }
 +            }
 +        } while (!hasFinished);
 +        record.endRecord();
 +        recordNumber++;
 +        return true;
 +    }
 +
 +    @Override
 +    public boolean stop() {
 +        try {
 +            reader.stop();
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +            return false;
 +        }
 +        return true;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
index 7ca185f,0000000..541737a
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterRecordReaderFactory.java
@@@ -1,145 -1,0 +1,150 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.record.reader.twitter;
 +
 +import java.util.Map;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.IExternalDataSourceFactory;
 +import org.apache.asterix.external.api.IRecordReader;
 +import org.apache.asterix.external.api.IRecordReaderFactory;
 +import org.apache.asterix.external.util.ExternalDataConstants;
- import org.apache.asterix.external.util.ExternalDataUtils;
 +import org.apache.asterix.external.util.TwitterUtil;
 +import org.apache.asterix.external.util.TwitterUtil.AuthenticationConstants;
 +import org.apache.asterix.external.util.TwitterUtil.SearchAPIConstants;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
 +import twitter4j.FilterQuery;
 +import twitter4j.Status;
 +
 +public class TwitterRecordReaderFactory implements IRecordReaderFactory<Status> {
 +
 +    private static final long serialVersionUID = 1L;
 +    private static final Logger LOGGER = Logger.getLogger(TwitterRecordReaderFactory.class.getName());
 +
 +    private static final String DEFAULT_INTERVAL = "10"; // 10 seconds
 +    private static final int INTAKE_CARDINALITY = 1; // degree of parallelism at intake stage
 +
 +    private Map<String, String> configuration;
 +    private boolean pull;
 +    private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
 +
 +    @Override
 +    public DataSourceType getDataSourceType() {
 +        return DataSourceType.RECORDS;
 +    }
 +
 +    @Override
 +    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
 +        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, INTAKE_CARDINALITY);
 +        return clusterLocations;
 +    }
 +
 +    @Override
 +    public void configure(Map<String, String> configuration) throws AsterixException {
 +        this.configuration = configuration;
 +        TwitterUtil.initializeConfigurationWithAuthInfo(configuration);
 +        if (!validateConfiguration(configuration)) {
 +            StringBuilder builder = new StringBuilder();
 +            builder.append("One or more parameters are missing from adapter configuration\n");
 +            builder.append(AuthenticationConstants.OAUTH_CONSUMER_KEY + "\n");
 +            builder.append(AuthenticationConstants.OAUTH_CONSUMER_SECRET + "\n");
 +            builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN + "\n");
-             builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET + "\n");
++            builder.append(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
 +            throw new AsterixException(builder.toString());
 +        }
-         if (ExternalDataUtils.isPull(configuration)) {
++        if (TwitterRecordReaderFactory.isTwitterPull(configuration)) {
 +            pull = true;
 +            if (configuration.get(SearchAPIConstants.QUERY) == null) {
 +                throw new AsterixException(
 +                        "parameter " + SearchAPIConstants.QUERY + " not specified as part of adaptor configuration");
 +            }
 +            String interval = configuration.get(SearchAPIConstants.INTERVAL);
 +            if (interval != null) {
 +                try {
 +                    Integer.parseInt(interval);
 +                } catch (NumberFormatException nfe) {
 +                    throw new IllegalArgumentException(
 +                            "parameter " + SearchAPIConstants.INTERVAL + " is defined incorrectly, expecting a number");
 +                }
 +            } else {
 +                configuration.put(SearchAPIConstants.INTERVAL, DEFAULT_INTERVAL);
 +                if (LOGGER.isLoggable(Level.WARNING)) {
 +                    LOGGER.warning(" Parameter " + SearchAPIConstants.INTERVAL + " not defined, using default ("
 +                            + DEFAULT_INTERVAL + ")");
 +                }
 +            }
-         } else if (ExternalDataUtils.isPush(configuration)) {
-             pull = false;
 +        } else {
-             throw new AsterixException("One of boolean parameters " + ExternalDataConstants.KEY_PULL + " and "
-                     + ExternalDataConstants.KEY_PUSH + " must be specified as part of adaptor configuration");
++            pull = false;
++        }
++    }
++
++    public static boolean isTwitterPull(Map<String, String> configuration) {
++        String reader = configuration.get(ExternalDataConstants.KEY_READER);
++        if (reader.equals(ExternalDataConstants.READER_TWITTER_PULL)
++                || reader.equals(ExternalDataConstants.READER_PULL_TWITTER)) {
++            return true;
 +        }
++        return false;
 +    }
 +
 +    @Override
 +    public boolean isIndexible() {
 +        return false;
 +    }
 +
 +    @Override
 +    public IRecordReader<? extends Status> createRecordReader(IHyracksTaskContext ctx, int partition)
 +            throws HyracksDataException {
 +        if (pull) {
 +            return new TwitterPullRecordReader(TwitterUtil.getTwitterService(configuration),
 +                    configuration.get(SearchAPIConstants.QUERY),
 +                    Integer.parseInt(configuration.get(SearchAPIConstants.INTERVAL)));
 +        } else {
 +            FilterQuery query;
 +            try {
 +                query = TwitterUtil.getFilterQuery(configuration);
 +                return (query == null) ? new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration))
 +                        : new TwitterPushRecordReader(TwitterUtil.getTwitterStream(configuration), query);
 +            } catch (AsterixException e) {
 +                throw new HyracksDataException(e);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public Class<? extends Status> getRecordClass() {
 +        return Status.class;
 +    }
 +
 +    private boolean validateConfiguration(Map<String, String> configuration) {
 +        String consumerKey = configuration.get(AuthenticationConstants.OAUTH_CONSUMER_KEY);
 +        String consumerSecret = configuration.get(AuthenticationConstants.OAUTH_CONSUMER_SECRET);
 +        String accessToken = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN);
 +        String tokenSecret = configuration.get(AuthenticationConstants.OAUTH_ACCESS_TOKEN_SECRET);
 +        if ((consumerKey == null) || (consumerSecret == null) || (accessToken == null) || (tokenSecret == null)) {
 +            return false;
 +        }
 +        return true;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AsterixInputStreamReader.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AsterixInputStreamReader.java
index 7e280a5,0000000..94333d1
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AsterixInputStreamReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AsterixInputStreamReader.java
@@@ -1,120 -1,0 +1,121 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.stream;
 +
 +import java.io.IOException;
 +import java.io.Reader;
 +import java.nio.ByteBuffer;
 +import java.nio.CharBuffer;
 +import java.nio.charset.CharsetDecoder;
 +import java.nio.charset.StandardCharsets;
 +
 +import org.apache.asterix.external.api.AsterixInputStream;
 +import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.FeedLogManager;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
 +public class AsterixInputStreamReader extends Reader {
 +    private AsterixInputStream in;
 +    private byte[] bytes = new byte[ExternalDataConstants.DEFAULT_BUFFER_SIZE];
 +    private ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
 +    private CharBuffer charBuffer = CharBuffer.allocate(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
 +    private CharsetDecoder decoder;
 +    private boolean done = false;
 +
 +    public AsterixInputStreamReader(AsterixInputStream in) {
 +        this.in = in;
 +        this.decoder = StandardCharsets.UTF_8.newDecoder();
 +        this.byteBuffer.flip();
 +    }
 +
 +    public void stop() throws IOException {
 +        try {
 +            in.stop();
 +        } catch (Exception e) {
 +            throw new IOException(e);
 +        }
 +    }
 +
 +    public void setController(AbstractFeedDataFlowController controller) {
 +        in.setController(controller);
 +    }
 +
-     public void setFeedLogManager(FeedLogManager feedLogManager) {
++    public void setFeedLogManager(FeedLogManager feedLogManager) throws HyracksDataException {
 +        in.setFeedLogManager(feedLogManager);
 +    }
 +
 +    @Override
 +    public int read(char cbuf[]) throws IOException {
 +        return read(cbuf, 0, cbuf.length);
 +    }
 +
 +    @Override
 +    public int read(char cbuf[], int offset, int length) throws IOException {
 +        if (done) {
 +            return -1;
 +        }
 +        int len = 0;
 +        charBuffer.clear();
 +        while (charBuffer.position() == 0) {
 +            if (byteBuffer.hasRemaining()) {
 +                decoder.decode(byteBuffer, charBuffer, false);
 +                System.arraycopy(charBuffer.array(), 0, cbuf, offset, charBuffer.position());
 +                if (charBuffer.position() > 0) {
 +                    return charBuffer.position();
 +                } else {
 +                    // need to read more data
 +                    System.arraycopy(bytes, byteBuffer.position(), bytes, 0, byteBuffer.remaining());
 +                    byteBuffer.position(byteBuffer.remaining());
 +                    while (len == 0) {
 +                        len = in.read(bytes, byteBuffer.position(), bytes.length - byteBuffer.position());
 +                    }
 +                }
 +            } else {
 +                byteBuffer.clear();
 +                while (len == 0) {
 +                    len = in.read(bytes, 0, bytes.length);
 +                }
 +            }
 +            if (len == -1) {
 +                done = true;
 +                return len;
 +            }
 +            byteBuffer.position(len);
 +            byteBuffer.flip();
 +            decoder.decode(byteBuffer, charBuffer, false);
 +            System.arraycopy(charBuffer.array(), 0, cbuf, offset, charBuffer.position());
 +        }
 +        return charBuffer.position();
 +    }
 +
 +    @Override
 +    public void close() throws IOException {
 +        in.close();
 +    }
 +
 +    public boolean handleException(Throwable th) {
 +        return in.handleException(th);
 +    }
 +
 +    @Override
 +    public void reset() throws IOException {
 +        byteBuffer.limit(0);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java
index 063b8fa,0000000..997c254
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/HDFSInputStream.java
@@@ -1,237 -1,0 +1,234 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.stream;
 +
 +import java.io.IOException;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.AsterixInputStream;
 +import org.apache.asterix.external.api.IExternalIndexer;
 +import org.apache.asterix.external.api.IIndexingDatasource;
 +import org.apache.asterix.external.indexing.ExternalFile;
 +import org.apache.asterix.external.input.record.reader.hdfs.EmptyRecordReader;
- import org.apache.asterix.external.provider.ExternalIndexerProvider;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.hadoop.fs.FileStatus;
 +import org.apache.hadoop.fs.FileSystem;
 +import org.apache.hadoop.fs.Path;
 +import org.apache.hadoop.io.Text;
 +import org.apache.hadoop.io.Writable;
 +import org.apache.hadoop.mapred.FileSplit;
 +import org.apache.hadoop.mapred.InputFormat;
 +import org.apache.hadoop.mapred.InputSplit;
 +import org.apache.hadoop.mapred.JobConf;
 +import org.apache.hadoop.mapred.RecordReader;
 +import org.apache.hadoop.mapred.Reporter;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
 +public class HDFSInputStream extends AsterixInputStream implements IIndexingDatasource {
 +
 +    private RecordReader<Object, Text> reader;
 +    private Text value = null;
 +    private Object key = null;
 +    private int currentSplitIndex = 0;
 +    private boolean read[];
 +    private InputFormat<?, Text> inputFormat;
 +    private InputSplit[] inputSplits;
 +    private String[] readSchedule;
 +    private String nodeName;
 +    private JobConf conf;
 +    // Indexing variables
 +    private final IExternalIndexer indexer;
 +    private final List<ExternalFile> snapshot;
 +    private final FileSystem hdfs;
 +    private int pos = 0;
 +
 +    @SuppressWarnings("unchecked")
 +    public HDFSInputStream(boolean read[], InputSplit[] inputSplits, String[] readSchedule, String nodeName,
-             JobConf conf, Map<String, String> configuration, List<ExternalFile> snapshot)
-                     throws IOException, AsterixException {
++            JobConf conf, Map<String, String> configuration, List<ExternalFile> snapshot, IExternalIndexer indexer)
++            throws IOException, AsterixException {
 +        this.read = read;
 +        this.inputSplits = inputSplits;
 +        this.readSchedule = readSchedule;
 +        this.nodeName = nodeName;
 +        this.conf = conf;
 +        this.inputFormat = conf.getInputFormat();
 +        this.reader = new EmptyRecordReader<Object, Text>();
 +        this.snapshot = snapshot;
 +        this.hdfs = FileSystem.get(conf);
++        this.indexer = indexer;
 +        nextInputSplit();
 +        this.value = new Text();
 +        if (snapshot != null) {
-             this.indexer = ExternalIndexerProvider.getIndexer(configuration);
 +            if (currentSplitIndex < snapshot.size()) {
 +                indexer.reset(this);
 +            }
-         } else {
-             this.indexer = null;
 +        }
 +    }
 +
 +    @Override
 +    public int read() throws IOException {
 +        if (value.getLength() < pos) {
 +            if (!readMore()) {
 +                return -1;
 +            }
 +        } else if (value.getLength() == pos) {
 +            pos++;
 +            return ExternalDataConstants.BYTE_LF;
 +        }
 +        return value.getBytes()[pos++];
 +    }
 +
 +    private int readRecord(byte[] buffer, int offset, int len) {
 +        int actualLength = value.getLength() + 1;
 +        if ((actualLength - pos) > len) {
 +            //copy partial record
 +            System.arraycopy(value.getBytes(), pos, buffer, offset, len);
 +            pos += len;
 +            return len;
 +        } else {
 +            int numBytes = value.getLength() - pos;
 +            System.arraycopy(value.getBytes(), pos, buffer, offset, numBytes);
 +            buffer[offset + numBytes] = ExternalDataConstants.LF;
 +            pos += numBytes;
 +            numBytes++;
 +            return numBytes;
 +        }
 +    }
 +
 +    @Override
 +    public int read(byte[] buffer, int offset, int len) throws IOException {
 +        if (value.getLength() > pos) {
 +            return readRecord(buffer, offset, len);
 +        }
 +        if (!readMore()) {
 +            return -1;
 +        }
 +        return readRecord(buffer, offset, len);
 +    }
 +
 +    private boolean readMore() throws IOException {
 +        try {
 +            pos = 0;
 +            return HDFSInputStream.this.hasNext();
 +        } catch (Exception e) {
 +            throw new IOException(e);
 +        }
 +    }
 +
 +    @Override
 +    public boolean stop() throws Exception {
 +        return false;
 +    }
 +
 +    @Override
 +    public boolean handleException(Throwable th) {
 +        return false;
 +    }
 +
 +    @Override
 +    public void close() throws IOException {
 +        reader.close();
 +    }
 +
 +    private boolean hasNext() throws Exception {
 +        if (reader.next(key, value)) {
 +            return true;
 +        }
 +        while (nextInputSplit()) {
 +            if (reader.next(key, value)) {
 +                return true;
 +            }
 +        }
 +        return false;
 +    }
 +
 +    private boolean nextInputSplit() throws IOException {
 +        for (; currentSplitIndex < inputSplits.length; currentSplitIndex++) {
 +            /**
 +             * read all the partitions scheduled to the current node
 +             */
 +            if (readSchedule[currentSplitIndex].equals(nodeName)) {
 +                /**
 +                 * pick an unread split to read synchronize among
 +                 * simultaneous partitions in the same machine
 +                 */
 +                synchronized (read) {
 +                    if (read[currentSplitIndex] == false) {
 +                        read[currentSplitIndex] = true;
 +                    } else {
 +                        continue;
 +                    }
 +                }
 +                if (snapshot != null) {
 +                    String fileName = ((FileSplit) (inputSplits[currentSplitIndex])).getPath().toUri().getPath();
 +                    FileStatus fileStatus = hdfs.getFileStatus(new Path(fileName));
 +                    // Skip if not the same file stored in the files snapshot
 +                    if (fileStatus.getModificationTime() != snapshot.get(currentSplitIndex).getLastModefiedTime()
 +                            .getTime()) {
 +                        continue;
 +                    }
 +                }
 +
 +                reader.close();
 +                reader = getRecordReader(currentSplitIndex);
 +                return true;
 +            }
 +        }
 +        return false;
 +    }
 +
 +    @SuppressWarnings("unchecked")
 +    private RecordReader<Object, Text> getRecordReader(int splitIndex) throws IOException {
 +        reader = (RecordReader<Object, Text>) inputFormat.getRecordReader(inputSplits[splitIndex], conf, Reporter.NULL);
 +        if (key == null) {
 +            key = reader.createKey();
 +            value = reader.createValue();
 +        }
 +        if (indexer != null) {
 +            try {
 +                indexer.reset(this);
 +            } catch (Exception e) {
 +                throw new HyracksDataException(e);
 +            }
 +        }
 +        return reader;
 +    }
 +
 +    @Override
 +    public IExternalIndexer getIndexer() {
 +        return indexer;
 +    }
 +
 +    @Override
 +    public List<ExternalFile> getSnapshot() {
 +        return snapshot;
 +    }
 +
 +    @Override
 +    public int getCurrentSplitIndex() {
 +        return currentSplitIndex;
 +    }
 +
 +    @Override
 +    public RecordReader<?, ? extends Writable> getReader() {
 +        return reader;
 +    }
 +}


[13/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
index 5fd6f21,0000000..93b31ca
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
@@@ -1,1728 -1,0 +1,1778 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.library;
 +
 +import java.io.DataOutput;
 +import java.io.File;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.util.BitSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +
 +import org.apache.asterix.builders.AbvsBuilderFactory;
 +import org.apache.asterix.builders.IARecordBuilder;
 +import org.apache.asterix.builders.IAsterixListBuilder;
 +import org.apache.asterix.builders.ListBuilderFactory;
 +import org.apache.asterix.builders.OrderedListBuilder;
 +import org.apache.asterix.builders.RecordBuilderFactory;
 +import org.apache.asterix.builders.UnorderedListBuilder;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.IRawRecord;
 +import org.apache.asterix.external.api.IRecordDataParser;
 +import org.apache.asterix.external.classad.AMutableCharArrayString;
 +import org.apache.asterix.external.classad.AMutableNumberFactor;
 +import org.apache.asterix.external.classad.AttributeReference;
 +import org.apache.asterix.external.classad.CaseInsensitiveString;
 +import org.apache.asterix.external.classad.CharArrayLexerSource;
 +import org.apache.asterix.external.classad.ClassAd;
 +import org.apache.asterix.external.classad.ExprList;
 +import org.apache.asterix.external.classad.ExprTree;
 +import org.apache.asterix.external.classad.ExprTree.NodeKind;
 +import org.apache.asterix.external.classad.ExprTreeHolder;
 +import org.apache.asterix.external.classad.FileLexerSource;
 +import org.apache.asterix.external.classad.FunctionCall;
 +import org.apache.asterix.external.classad.InputStreamLexerSource;
 +import org.apache.asterix.external.classad.Lexer;
 +import org.apache.asterix.external.classad.Lexer.TokenType;
 +import org.apache.asterix.external.classad.LexerSource;
 +import org.apache.asterix.external.classad.Literal;
 +import org.apache.asterix.external.classad.Operation;
 +import org.apache.asterix.external.classad.StringLexerSource;
 +import org.apache.asterix.external.classad.TokenValue;
 +import org.apache.asterix.external.classad.Value;
 +import org.apache.asterix.external.classad.Value.NumberFactor;
 +import org.apache.asterix.external.classad.object.pool.ClassAdObjectPool;
 +import org.apache.asterix.external.parser.AbstractDataParser;
 +import org.apache.asterix.om.base.ABoolean;
 +import org.apache.asterix.om.base.AMutableInt32;
 +import org.apache.asterix.om.types.AOrderedListType;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.asterix.om.types.ATypeTag;
 +import org.apache.asterix.om.types.AUnionType;
 +import org.apache.asterix.om.types.AUnorderedListType;
 +import org.apache.asterix.om.types.IAType;
 +import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
 +import org.apache.asterix.om.util.NonTaggedFormatUtil;
 +import org.apache.asterix.om.util.container.IObjectPool;
 +import org.apache.asterix.om.util.container.ListObjectPool;
 +import org.apache.commons.lang3.mutable.MutableBoolean;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.data.std.api.IMutableValueStorage;
 +import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
 +
 +/// This reads ClassAd strings from various sources and converts them into a ClassAd.
 +/// It can read from Strings, Files, and InputStreams.
 +public class ClassAdParser extends AbstractDataParser implements IRecordDataParser<char[]> {
 +
 +    // reusable components
 +    private Lexer lexer = new Lexer();
 +    private LexerSource currentSource = null;
 +    private boolean isExpr = false;
 +    private final ClassAdObjectPool objectPool;
 +    // asterix objects
 +    private ARecordType recordType;
 +    private IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<IARecordBuilder, ATypeTag>(
 +            new RecordBuilderFactory());
 +    private IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool = new ListObjectPool<IAsterixListBuilder, ATypeTag>(
 +            new ListBuilderFactory());
 +    private IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool = new ListObjectPool<IMutableValueStorage, ATypeTag>(
 +            new AbvsBuilderFactory());
 +    private final ClassAd rootAd;
 +    private String exprPrefix = "expr=";
 +    private String exprSuffix = "";
 +    private boolean evaluateExpr = true;
 +    private String exprFieldNameSuffix = "Expr";
 +    private boolean keepBoth = true;
 +    private boolean oldFormat = true;
 +    private StringLexerSource stringLexerSource = new StringLexerSource("");
 +
 +    public ClassAdParser(ARecordType recordType, boolean oldFormat, boolean evaluateExpr, boolean keepBoth,
 +            String exprPrefix, String exprSuffix, String exprFieldNameSuffix, ClassAdObjectPool objectPool) {
 +        if (objectPool == null) {
 +            System.out.println();
 +        }
 +        this.objectPool = objectPool;
 +        this.rootAd = new ClassAd(objectPool);
 +        this.recordType = recordType;
 +        this.currentSource = new CharArrayLexerSource();
 +        this.recordType = recordType;
 +        this.oldFormat = oldFormat;
 +        if (oldFormat) {
 +            rootAd.createParser();
 +        }
 +        this.keepBoth = keepBoth;
 +        this.evaluateExpr = evaluateExpr;
 +        this.exprPrefix = exprPrefix;
 +        this.exprSuffix = exprSuffix;
 +        this.exprFieldNameSuffix = exprFieldNameSuffix;
 +    }
 +
 +    public ClassAdParser(ClassAdObjectPool objectPool) {
 +        if (objectPool == null) {
 +            System.out.println();
 +        }
 +        this.objectPool = objectPool;
 +        this.currentSource = new CharArrayLexerSource();
 +        rootAd = null;
 +    }
 +
 +    /***********************************
 +     * AsterixDB Specific begin
 +     *
 +     * @throws AsterixException
 +     ***********************************/
 +    public void asterixParse(ClassAd classad, DataOutput out) throws IOException, AsterixException {
 +        // we assume the lexer source used here is a char array
 +        parseClassAd(currentSource, classad, false);
 +        parseRecord(null, classad, out);
 +    }
 +
 +    public void handleErrorParsing() throws IOException {
 +    }
 +
 +    private boolean asterixParseClassAd(ClassAd ad) throws IOException {
 +        TokenType tt;
 +        ad.clear();
 +        lexer.initialize(currentSource);
 +        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_BOX) {
 +            handleErrorParsing();
 +            return false;
 +        }
 +        tt = lexer.peekToken();
 +        TokenValue tv = objectPool.tokenValuePool.get();
 +        ExprTreeHolder tree = objectPool.mutableExprPool.get();
 +        while (tt != TokenType.LEX_CLOSE_BOX) {
 +            // Get the name of the expression
 +            tv.reset();
 +            tree.reset();
 +            tt = lexer.consumeToken(tv);
 +            if (tt == TokenType.LEX_SEMICOLON) {
 +                // We allow empty expressions, so if someone give a double
 +                // semicolon, it doesn't
 +                // hurt. Technically it's not right, but we shouldn't make users
 +                // pay the price for
 +                // a meaningless mistake. See condor-support #1881 for a user
 +                // that was bitten by this.
 +                continue;
 +            }
 +            if (tt != TokenType.LEX_IDENTIFIER) {
 +                throw new HyracksDataException(
 +                        "while parsing classad:  expected LEX_IDENTIFIER " + " but got " + Lexer.strLexToken(tt));
 +            }
 +
 +            // consume the intermediate '='
 +            if ((tt = lexer.consumeToken()) != TokenType.LEX_BOUND_TO) {
 +                throw new HyracksDataException(
 +                        "while parsing classad:  expected LEX_BOUND_TO " + " but got " + Lexer.strLexToken(tt));
 +            }
 +
 +            int positionBefore = lexer.getLexSource().getPosition();
 +            isExpr = false;
 +            // parse the expression
 +            parseExpression(tree);
 +            if (tree.getInnerTree() == null) {
 +                handleErrorParsing();
 +                throw new HyracksDataException("parse expression returned empty tree");
 +            }
 +
 +            if ((!evaluateExpr || keepBoth) && isExpr && positionBefore >= 0) {
 +                // we will store a string representation of the expression
-                 int len = lexer.getLexSource().getPosition() - positionBefore;
++                int len = lexer.getLexSource().getPosition() - positionBefore - 2;
 +                // add it as it is to the classAd
 +                Literal lit = objectPool.literalPool.get();
 +                Value exprVal = objectPool.valuePool.get();
-                 exprVal.setStringValue(exprPrefix
-                         + String.valueOf(lexer.getLexSource().getBuffer(), positionBefore, len) + exprSuffix);
++
++                exprVal.setStringValue((exprPrefix == null ? "" : exprPrefix)
++                        + String.valueOf(lexer.getLexSource().getBuffer(), positionBefore, len)
++                        + (exprSuffix == null ? "" : exprSuffix));
 +                Literal.createLiteral(lit, exprVal, NumberFactor.NO_FACTOR);
 +                if (!evaluateExpr) {
 +                    ad.insert(tv.getStrValue().toString(), lit);
 +                } else {
 +                    ad.insert(tv.getStrValue().toString() + exprFieldNameSuffix, lit);
 +                }
 +            }
 +            if (!isExpr || (evaluateExpr)) {
 +                // insert the attribute into the classad
 +                if (!ad.insert(tv.getStrValue().toString(), tree)) {
 +                    handleErrorParsing();
 +                    throw new HyracksDataException("Couldn't insert value to classad");
 +                }
 +            }
 +            // the next token must be a ';' or a ']'
 +            tt = lexer.peekToken();
 +            if (tt != TokenType.LEX_SEMICOLON && tt != TokenType.LEX_CLOSE_BOX) {
 +                handleErrorParsing();
 +                throw new HyracksDataException("while parsing classad:  expected LEX_SEMICOLON or "
 +                        + "LEX_CLOSE_BOX but got " + Lexer.strLexToken(tt));
 +            }
 +
 +            // Slurp up any extra semicolons. This does not duplicate the work
 +            // at the top of the loop
 +            // because it accounts for the case where the last expression has
 +            // extra semicolons,
 +            // while the first case accounts for optional beginning semicolons.
 +            while (tt == TokenType.LEX_SEMICOLON) {
 +                lexer.consumeToken();
 +                tt = lexer.peekToken();
 +            }
 +        }
 +        return true;
 +    }
 +
 +    public static String readLine(char[] buffer, AMutableInt32 offset, int maxOffset) {
 +        int position = offset.getIntegerValue();
 +        while (buffer[position] != '\n' && position < maxOffset) {
 +            position++;
 +        }
 +        if (offset.getIntegerValue() == position) {
 +            return null;
 +        }
 +        String line = String.valueOf(buffer, offset.getIntegerValue(), position - offset.getIntegerValue());
 +        position++;
 +        offset.setValue(position);
 +        return line;
 +    }
 +
 +    private AMutableInt32 aInt32 = new AMutableInt32(0);
 +
 +    /**
 +     * Resets the pools before parsing a top-level record. In this way the
 +     * elements in those pools can be re-used.
 +     */
 +    private void resetPools() {
 +        listBuilderPool.reset();
 +        recordBuilderPool.reset();
 +        abvsBuilderPool.reset();
 +        objectPool.reset();
 +    }
 +
 +    private ATypeTag getTargetTypeTag(ATypeTag expectedTypeTag, IAType aObjectType) throws IOException {
 +        if (aObjectType == null) {
 +            return expectedTypeTag;
 +        }
 +        if (aObjectType.getTypeTag() != ATypeTag.UNION) {
 +            final ATypeTag typeTag = aObjectType.getTypeTag();
 +            if (ATypeHierarchy.canPromote(expectedTypeTag, typeTag)
 +                    || ATypeHierarchy.canDemote(expectedTypeTag, typeTag)) {
 +                return typeTag;
 +            } else {
 +                return null;
 +            }
 +        } else { // union
 +            List<IAType> unionList = ((AUnionType) aObjectType).getUnionList();
 +            for (IAType t : unionList) {
 +                final ATypeTag typeTag = t.getTypeTag();
 +                if (ATypeHierarchy.canPromote(expectedTypeTag, typeTag)
 +                        || ATypeHierarchy.canDemote(expectedTypeTag, typeTag)) {
 +                    return typeTag;
 +                }
 +            }
 +        }
 +        return null;
 +    }
 +
 +    private void parseRecord(ARecordType recType, ClassAd pAd, DataOutput out) throws IOException, AsterixException {
 +        ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
 +        ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
 +        IARecordBuilder recBuilder = getRecordBuilder();
 +        BitSet nulls = null;
 +        if (recType != null) {
 +            nulls = getBitSet();
 +            recBuilder.reset(recType);
 +        } else {
 +            recBuilder.reset(null);
 +        }
 +        recBuilder.init();
 +        Boolean openRecordField = false;
 +        int fieldId = 0;
 +        IAType fieldType = null;
 +
 +        // new stuff
 +        Map<CaseInsensitiveString, ExprTree> attrs = pAd.getAttrList();
 +        for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
 +            // reset buffers
 +            fieldNameBuffer.reset();
 +            fieldValueBuffer.reset();
 +            // take care of field name
 +            String fldName = entry.getKey().get();
 +            if (recType != null) {
 +                fieldId = recBuilder.getFieldId(fldName);
 +                if (fieldId < 0 && !recType.isOpen()) {
 +                    throw new HyracksDataException("This record is closed, you can not add extra fields !!");
 +                } else if (fieldId < 0 && recType.isOpen()) {
 +                    aStringFieldName.setValue(fldName);
-                     if (aStringFieldName.getStringValue().contains("org.apache.asterix.external.classad.TokenValue")) {
-                         System.err.println("we have a problem");
-                     }
 +                    stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
 +                    openRecordField = true;
 +                    fieldType = null;
 +                } else {
 +                    // a closed field
 +                    nulls.set(fieldId);
 +                    fieldType = recType.getFieldTypes()[fieldId];
 +                    openRecordField = false;
 +                }
 +            } else {
 +                aStringFieldName.setValue(fldName);
 +                stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
 +                openRecordField = true;
 +                fieldType = null;
 +            }
 +
 +            // add field value to value buffer
 +            writeFieldValueToBuffer(fieldType, fieldValueBuffer.getDataOutput(), fldName, entry.getValue(), pAd);
 +            if (openRecordField) {
 +                if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
 +                    recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
 +                }
 +            } else if (NonTaggedFormatUtil.isOptional(fieldType)) {
 +                if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
 +                    recBuilder.addField(fieldId, fieldValueBuffer);
 +                }
 +            } else {
 +                recBuilder.addField(fieldId, fieldValueBuffer);
 +            }
 +        }
 +
 +        if (recType != null) {
 +            int nullableFieldId = checkNullConstraints(recType, nulls);
 +            if (nullableFieldId != -1) {
 +                throw new HyracksDataException(
 +                        "Field: " + recType.getFieldNames()[nullableFieldId] + " can not be null");
 +            }
 +        }
 +        recBuilder.write(out, true);
 +    }
 +
-     // The only method left
 +    private void writeFieldValueToBuffer(IAType fieldType, DataOutput out, String name, ExprTree tree, ClassAd pAd)
 +            throws IOException, AsterixException {
 +        Value val;
 +        switch (tree.getKind()) {
 +            case ATTRREF_NODE:
 +            case CLASSAD_NODE:
 +            case EXPR_ENVELOPE:
 +            case EXPR_LIST_NODE:
 +            case FN_CALL_NODE:
 +            case OP_NODE:
 +                val = objectPool.valuePool.get();
 +                if (pAd.evaluateAttr(name, val)) {
- 
 +                } else {
 +                    // just write the expr
 +                    val = ((Literal) pAd.getAttrList().get(name + "Expr")).getValue();
 +                }
 +                break;
 +            case LITERAL_NODE:
 +                val = ((Literal) tree.getTree()).getValue();
 +                break;
 +            default:
 +                throw new HyracksDataException("Unknown Expression type detected: " + tree.getKind());
 +        }
 +
 +        switch (val.getValueType()) {
 +            case ABSOLUTE_TIME_VALUE:
 +                if (checkType(ATypeTag.DATETIME, fieldType)) {
 +                    parseDateTime(val, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            case BOOLEAN_VALUE:
 +                if (checkType(ATypeTag.BOOLEAN, fieldType)) {
 +                    booleanSerde.serialize(val.getBoolVal() ? ABoolean.TRUE : ABoolean.FALSE, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            case CLASSAD_VALUE:
 +                if (checkType(ATypeTag.RECORD, fieldType)) {
 +                    IAType objectType = getComplexType(fieldType, ATypeTag.RECORD);
 +                    ClassAd classad = val.getClassadVal();
 +                    parseRecord((ARecordType) objectType, classad, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            case ERROR_VALUE:
 +            case STRING_VALUE:
 +            case UNDEFINED_VALUE:
 +                if (checkType(ATypeTag.STRING, fieldType)) {
 +                    parseString(val, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            case INTEGER_VALUE:
 +                if (checkType(ATypeTag.INT64, fieldType)) {
-                     aInt64.setValue(val.getLongVal());
-                     int64Serde.serialize(aInt64, out);
++                    if (fieldType == null || fieldType.getTypeTag() == ATypeTag.INT64) {
++                        aInt64.setValue(val.getLongVal());
++                        int64Serde.serialize(aInt64, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.INT32) {
++                        aInt32.setValue((int) val.getLongVal());
++                        int32Serde.serialize(aInt32, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.DOUBLE) {
++                        aDouble.setValue(val.getLongVal());
++                        doubleSerde.serialize(aDouble, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.INT16) {
++                        aInt16.setValue((short) val.getLongVal());
++                        int16Serde.serialize(aInt16, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.INT8) {
++                        aInt8.setValue((byte) val.getLongVal());
++                        int8Serde.serialize(aInt8, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
++                        aFloat.setValue(val.getLongVal());
++                        floatSerde.serialize(aFloat, out);
++                    }
++                } else if (checkType(ATypeTag.DATETIME, fieldType)) {
++                    // Classad uses Linux Timestamps (s instead of ms)
++                    aDateTime.setValue(val.getLongVal() * 1000);
++                    datetimeSerde.serialize(aDateTime, out);
++                } else if (checkType(ATypeTag.DURATION, fieldType)) {
++                    // Classad uses Linux Timestamps (s instead of ms)
++                    aDuration.setValue(0, val.getLongVal() * 1000);
++                    durationSerde.serialize(aDuration, out);
++                } else if (checkType(ATypeTag.INT32, fieldType)) {
++                    aInt32.setValue((int) val.getLongVal());
++                    int32Serde.serialize(aInt32, out);
 +                } else if (checkType(ATypeTag.DOUBLE, fieldType)) {
 +                    aDouble.setValue(val.getLongVal());
 +                    doubleSerde.serialize(aDouble, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            case LIST_VALUE:
 +            case SLIST_VALUE:
 +                IAType objectType;
 +                if (checkType(ATypeTag.UNORDEREDLIST, fieldType)) {
 +                    objectType = getComplexType(fieldType, ATypeTag.UNORDEREDLIST);
 +                    parseUnorderedList((AUnorderedListType) objectType, val, out);
 +                } else if (checkType(ATypeTag.ORDEREDLIST, fieldType)) {
 +                    objectType = getComplexType(fieldType, ATypeTag.ORDEREDLIST);
 +                    parseOrderedList((AOrderedListType) objectType, val, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            case REAL_VALUE:
 +                if (checkType(ATypeTag.DOUBLE, fieldType)) {
-                     aDouble.setValue(val.getDoubleVal());
-                     doubleSerde.serialize(aDouble, out);
++                    if (fieldType == null || fieldType.getTypeTag() == ATypeTag.DOUBLE) {
++                        aDouble.setValue(val.getDoubleVal());
++                        doubleSerde.serialize(aDouble, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.INT32) {
++                        aInt32.setValue((int) val.getDoubleVal());
++                        int32Serde.serialize(aInt32, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.INT64) {
++                        aInt64.setValue((long) val.getDoubleVal());
++                        int64Serde.serialize(aInt64, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.INT16) {
++                        aInt16.setValue((short) val.getDoubleVal());
++                        int16Serde.serialize(aInt16, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.INT8) {
++                        aInt8.setValue((byte) val.getDoubleVal());
++                        int8Serde.serialize(aInt8, out);
++                    } else if (fieldType.getTypeTag() == ATypeTag.FLOAT) {
++                        aFloat.setValue((float) val.getDoubleVal());
++                        floatSerde.serialize(aFloat, out);
++                    }
 +                } else if (checkType(ATypeTag.INT32, fieldType)) {
 +                    aInt32.setValue((int) val.getDoubleVal());
 +                    int32Serde.serialize(aInt32, out);
 +                } else if (checkType(ATypeTag.INT64, fieldType)) {
 +                    aInt64.setValue((long) val.getDoubleVal());
 +                    int64Serde.serialize(aInt64, out);
++                } else if (checkType(ATypeTag.DATETIME, fieldType)) {
++                    // Classad uses Linux Timestamps (s instead of ms)
++                    aDateTime.setValue(val.getLongVal() * 1000);
++                    datetimeSerde.serialize(aDateTime, out);
++                } else if (checkType(ATypeTag.DURATION, fieldType)) {
++                    // Classad uses Linux Timestamps (s instead of ms)
++                    aDuration.setValue(0, (long) (val.getDoubleVal() * 1000.0));
++                    durationSerde.serialize(aDuration, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            case RELATIVE_TIME_VALUE:
 +                if (checkType(ATypeTag.DURATION, fieldType)) {
 +                    parseDuration(val, out);
 +                } else {
 +                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
 +                }
 +                break;
 +            default:
 +                throw new HyracksDataException("unknown data type " + val.getValueType());
 +        }
 +    }
 +
 +    private void parseOrderedList(AOrderedListType oltype, Value listVal, DataOutput out)
 +            throws IOException, AsterixException {
 +        ArrayBackedValueStorage itemBuffer = getTempBuffer();
 +        OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
 +        IAType itemType = null;
 +        if (oltype != null) {
 +            itemType = oltype.getItemType();
 +        }
 +        orderedListBuilder.reset(oltype);
 +        for (ExprTree tree : listVal.getListVal().getExprList()) {
 +            itemBuffer.reset();
 +            writeFieldValueToBuffer(itemType, itemBuffer.getDataOutput(), null, tree, null);
 +            orderedListBuilder.addItem(itemBuffer);
 +        }
 +        orderedListBuilder.write(out, true);
 +    }
 +
 +    private void parseUnorderedList(AUnorderedListType uoltype, Value listVal, DataOutput out)
 +            throws IOException, AsterixException {
 +        ArrayBackedValueStorage itemBuffer = getTempBuffer();
 +        UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
 +        IAType itemType = null;
 +        if (uoltype != null) {
 +            itemType = uoltype.getItemType();
 +        }
 +        unorderedListBuilder.reset(uoltype);
 +        for (ExprTree tree : listVal.getListVal().getExprList()) {
 +            itemBuffer.reset();
 +            writeFieldValueToBuffer(itemType, itemBuffer.getDataOutput(), null, tree, null);
 +            unorderedListBuilder.addItem(itemBuffer);
 +        }
 +        unorderedListBuilder.write(out, true);
 +    }
 +
 +    private void parseString(Value val, DataOutput out) throws HyracksDataException {
 +        switch (val.getValueType()) {
 +            case ERROR_VALUE:
 +                aString.setValue("error");
 +                break;
 +            case STRING_VALUE:
 +                aString.setValue(val.getStringVal());
 +                break;
 +            case UNDEFINED_VALUE:
 +                aString.setValue("undefined");
 +                break;
 +            default:
 +                throw new HyracksDataException("Unknown String type " + val.getValueType());
 +        }
 +        stringSerde.serialize(aString, out);
 +    }
 +
 +    protected void parseDuration(Value duration, DataOutput out) throws HyracksDataException {
 +        try {
 +            aDuration.setValue(0, duration.getTimeVal().getRelativeTime());
 +            durationSerde.serialize(aDuration, out);
 +        } catch (Exception e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
 +    protected void parseDateTime(Value datetime, DataOutput out) throws HyracksDataException {
 +        aDateTime.setValue(datetime.getTimeVal().getTimeInMillis());
 +        datetimeSerde.serialize(aDateTime, out);
 +    }
 +
 +    public static IAType getComplexType(IAType aObjectType, ATypeTag tag) {
 +        if (aObjectType == null) {
 +            return null;
 +        }
 +
 +        if (aObjectType.getTypeTag() == tag) {
 +            return aObjectType;
 +        }
 +
 +        if (aObjectType.getTypeTag() == ATypeTag.UNION) {
 +            List<IAType> unionList = ((AUnionType) aObjectType).getUnionList();
 +            for (int i = 0; i < unionList.size(); i++) {
 +                if (unionList.get(i).getTypeTag() == tag) {
 +                    return unionList.get(i);
 +                }
 +            }
 +        }
 +        return null; // wont get here
 +    }
 +
 +    private String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
 +
 +    private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType) throws IOException {
 +        return getTargetTypeTag(expectedTypeTag, aObjectType) != null;
 +    }
 +
 +    private BitSet getBitSet() {
 +        return objectPool.bitSetPool.get();
 +    }
 +
 +    public static int checkNullConstraints(ARecordType recType, BitSet nulls) {
 +        boolean isNull = false;
 +        for (int i = 0; i < recType.getFieldTypes().length; i++) {
 +            if (nulls.get(i) == false) {
 +                IAType type = recType.getFieldTypes()[i];
 +                if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION) {
 +                    return i;
 +                }
 +
 +                if (type.getTypeTag() == ATypeTag.UNION) { // union
 +                    List<IAType> unionList = ((AUnionType) type).getUnionList();
 +                    for (int j = 0; j < unionList.size(); j++) {
 +                        if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
 +                            isNull = true;
 +                            break;
 +                        }
 +                    }
 +                    if (!isNull) {
 +                        return i;
 +                    }
 +                }
 +            }
 +        }
 +        return -1;
 +    }
 +
 +    private IARecordBuilder getRecordBuilder() {
 +        return recordBuilderPool.allocate(ATypeTag.RECORD);
 +    }
 +
 +    private IAsterixListBuilder getOrderedListBuilder() {
 +        return listBuilderPool.allocate(ATypeTag.ORDEREDLIST);
 +    }
 +
 +    private IAsterixListBuilder getUnorderedListBuilder() {
 +        return listBuilderPool.allocate(ATypeTag.UNORDEREDLIST);
 +    }
 +
 +    private ArrayBackedValueStorage getTempBuffer() {
 +        return (ArrayBackedValueStorage) abvsBuilderPool.allocate(ATypeTag.BINARY);
 +    }
 +
 +    public static ATypeTag getMatchingType(Literal lit) throws HyracksDataException {
 +        return getMatchingType(lit.getValue());
 +    }
 +
 +    public static ATypeTag getMatchingType(Value val) throws HyracksDataException {
 +        switch (val.getValueType()) {
 +            case ABSOLUTE_TIME_VALUE:
 +                return ATypeTag.DATETIME;
 +            case BOOLEAN_VALUE:
 +                return ATypeTag.BOOLEAN;
 +            case CLASSAD_VALUE:
 +                return ATypeTag.RECORD;
 +            case ERROR_VALUE:
 +            case STRING_VALUE:
 +            case UNDEFINED_VALUE:
 +                return ATypeTag.STRING;
 +            case INTEGER_VALUE:
 +                return ATypeTag.INT64;
 +            case LIST_VALUE:
 +            case SLIST_VALUE:
 +                return ATypeTag.UNORDEREDLIST;
 +            case NULL_VALUE:
 +                return ATypeTag.NULL;
 +            case REAL_VALUE:
 +                return ATypeTag.DOUBLE;
 +            case RELATIVE_TIME_VALUE:
 +                return ATypeTag.DURATION;
 +            default:
 +                throw new HyracksDataException("Unknown data type");
 +        }
 +    }
 +
 +    /********************************
 +     * End of AsterixDB specifics
 +     ********************************/
 +
 +    /**
 +     * Parse a ClassAd
 +     *
 +     * @param buffer
 +     *            Buffer containing the string representation of the classad.
 +     * @param full
 +     *            If this parameter is true, the parse is considered to succeed
 +     *            only if the ClassAd was parsed successfully and no other
 +     *            tokens follow the ClassAd.
 +     * @return pointer to the ClassAd object if successful, or null otherwise
 +     * @throws IOException
 +     */
 +    public ClassAd parseClassAd(String buffer, boolean full) throws IOException {
 +        currentSource = new StringLexerSource(buffer);
 +        return parseClassAd(currentSource, full);
 +    }
 +
 +    public ClassAd parseClassAd(String buffer, AMutableInt32 offset) throws IOException {
 +        currentSource = new StringLexerSource(buffer);
 +        ClassAd ad = parseClassAd((StringLexerSource) currentSource);
 +        offset.setValue(((StringLexerSource) currentSource).getCurrentLocation());
 +        return ad;
 +    }
 +
 +    public ClassAd parseClassAd(StringLexerSource lexer_source) throws IOException {
 +        return parseClassAd(lexer_source, false);
 +    }
 +
 +    public ClassAd parseClassAd(File file, boolean full) throws IOException {
 +        FileLexerSource fileLexerSource = new FileLexerSource(file);
 +        return parseClassAd(fileLexerSource, full);
 +    }
 +
 +    public ClassAd parseClassAd(InputStream in, boolean full) throws IOException {
 +        InputStreamLexerSource lexer_source = new InputStreamLexerSource(in);
 +        return parseClassAd(lexer_source, full);
 +    }
 +
 +    // preferred method since the parser doesn't need to create an object
 +    public void parseClassAd(ClassAd ad, LexerSource lexer_source, boolean full) throws IOException {
 +        ad.reset();
 +        if (lexer.initialize(lexer_source)) {
 +            if (!parseClassAd(ad, full)) {
 +                return;
 +            } else if (lexer_source.readPreviousCharacter() != '\0') {
 +                // The lexer swallows one extra character, so if we have
 +                // two classads back to back we need to make sure to unread
 +                // one of the characters.
 +                lexer_source.unreadCharacter();
 +            }
 +        }
 +    }
 +
 +    public ClassAd parseClassAd(LexerSource lexer_source, boolean full) throws IOException {
 +        System.out.println("Don't use this call. instead, pass a mutable classad instance");
 +        ClassAd ad = objectPool.classAdPool.get();
 +        if (lexer.initialize(lexer_source)) {
 +            if (!parseClassAd(ad, full)) {
 +                return null;
 +            } else if (lexer_source.readPreviousCharacter() != '\0') {
 +                // The lexer swallows one extra character, so if we have
 +                // two classads back to back we need to make sure to unread
 +                // one of the characters.
 +                lexer_source.unreadCharacter();
 +            }
 +        }
 +        return ad;
 +    }
 +
 +    /**
 +     * Parse a ClassAd
 +     *
 +     * @param buffer
 +     *            Buffer containing the string representation of the classad.
 +     * @param ad
 +     *            The classad to be populated
 +     * @param full
 +     *            If this parameter is true, the parse is considered to succeed
 +     *            only if the ClassAd was parsed successfully and no other
 +     *            tokens follow the ClassAd.
 +     * @return true on success, false on failure
 +     * @throws IOException
 +     */
 +    public boolean parseClassAd(String buffer, ClassAd classad, boolean full) throws IOException {
 +        StringLexerSource stringLexerSource = new StringLexerSource(buffer);
 +        return parseClassAd(stringLexerSource, classad, full);
 +    }
 +
 +    public boolean parseClassAd(String buffer, ClassAd classad, AMutableInt32 offset) throws IOException {
 +        boolean success = false;
 +        StringLexerSource stringLexerSource = new StringLexerSource(buffer, offset.getIntegerValue().intValue());
 +        success = parseClassAd(stringLexerSource, classad);
 +        offset.setValue(stringLexerSource.getCurrentLocation());
 +        return success;
 +    }
 +
 +    public boolean parseNext(ClassAd classad) throws IOException {
 +        return parseClassAd(currentSource, classad, false);
 +    }
 +
 +    public boolean parseNext(ClassAd classad, boolean full) throws IOException {
 +        return parseClassAd(currentSource, classad, full);
 +    }
 +
 +    private boolean parseClassAd(StringLexerSource lexer_source, ClassAd classad) throws IOException {
 +        return parseClassAd(lexer_source, classad, false);
 +    }
 +
 +    public boolean parseClassAd(File file, ClassAd classad, boolean full) throws IOException {
 +        FileLexerSource fileLexerSource = new FileLexerSource(file);
 +        return parseClassAd(fileLexerSource, classad, full);
 +    }
 +
 +    public boolean parseClassAd(InputStream stream, ClassAd classad, boolean full) throws IOException {
 +        InputStreamLexerSource inputStreamLexerSource = new InputStreamLexerSource(stream);
 +        return parseClassAd(inputStreamLexerSource, classad, full);
 +    }
 +
 +    public boolean parseClassAd(LexerSource lexer_source, ClassAd classad, boolean full) throws IOException {
 +        boolean success = false;
 +        if (lexer.initialize(lexer_source)) {
 +            success = parseClassAd(classad, full);
 +        }
 +        if (success) {
 +            // The lexer swallows one extra character, so if we have
 +            // two classads back to back we need to make sure to unread
 +            // one of the characters.
 +            if (lexer_source.readPreviousCharacter() != Lexer.EOF) {
 +                lexer_source.unreadCharacter();
 +            }
 +        } else {
 +            classad.clear();
 +        }
 +        return success;
 +    }
 +
 +    /**
 +     * Parse an expression
 +     *
 +     * @param buffer
 +     *            Buffer containing the string representation of the expression.
 +     * @param full
 +     *            If this parameter is true, the parse is considered to succeed
 +     *            only if the expression was parsed successfully and no other
 +     *            tokens are left.
 +     * @return pointer to the expression object if successful, or null otherwise
 +     */
 +    public ExprTree parseExpression(String buffer, boolean full) throws IOException {
 +        stringLexerSource.setNewSource(buffer);
 +        ExprTreeHolder mutableExpr = objectPool.mutableExprPool.get();
 +        if (lexer.initialize(stringLexerSource)) {
 +            parseExpression(mutableExpr, full);
 +        }
 +        return mutableExpr.getInnerTree();
 +    }
 +
 +    public ExprTree ParseExpression(String buffer) throws IOException {
 +        return parseExpression(buffer, false);
 +    }
 +
 +    public ExprTree parseExpression(LexerSource lexer_source, boolean full) throws IOException {
 +        ExprTreeHolder mutableExpr = objectPool.mutableExprPool.get();
 +        if (lexer.initialize(lexer_source)) {
 +            parseExpression(mutableExpr, full);
 +        }
 +        return mutableExpr.getInnerTree();
 +    }
 +
 +    public ExprTree parseNextExpression() throws IOException {
 +        if (!lexer.wasInitialized()) {
 +            return null;
 +        } else {
 +            ExprTreeHolder expr = objectPool.mutableExprPool.get();
 +            parseExpression(expr, false);
 +            ExprTree innerTree = expr.getInnerTree();
 +            return innerTree;
 +        }
 +    }
 +
 +    /*--------------------------------------------------------------------
 +    *
 +    * Private Functions
 +    *
 +    *-------------------------------------------------------------------*/
 +
 +    // Expression .= LogicalORExpression
 +    // | LogicalORExpression '?' Expression ':' Expression
 +
 +    private boolean parseExpression(ExprTreeHolder tree) throws IOException {
 +        return parseExpression(tree, false);
 +    }
 +
 +    private boolean parseExpression(ExprTreeHolder tree, boolean full) throws IOException {
 +        TokenType tt;
 +        if (!parseLogicalORExpression(tree)) {
 +            return false;
 +        }
 +        if ((tt = lexer.peekToken()) == TokenType.LEX_QMARK) {
 +            lexer.consumeToken();
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeM = objectPool.mutableExprPool.get();
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            parseExpression(treeM);
 +            if ((tt = lexer.consumeToken()) != TokenType.LEX_COLON) {
 +                throw new HyracksDataException("expected LEX_COLON, but got " + Lexer.strLexToken(tt));
 +            }
 +            parseExpression(treeR);
 +            if (treeL.getInnerTree() != null && treeM.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(Operation.OpKind_TERNARY_OP, treeL, treeM, treeR, newTree);
 +                tree.setInnerTree(newTree);
 +                return (true);
 +            }
 +            tree.setInnerTree(null);
 +            return false;
 +        }
 +        // if a full parse was requested, ensure that input is exhausted
 +        if (full && (lexer.consumeToken() != TokenType.LEX_END_OF_INPUT)) {
 +            throw new HyracksDataException(
 +                    "expected LEX_END_OF_INPUT on full parse, but got " + String.valueOf(Lexer.strLexToken(tt)));
 +        }
 +        return true;
 +    }
 +
 +    // LogicalORExpression .= LogicalANDExpression
 +    // | LogicalORExpression '||' LogicalANDExpression
 +
 +    private boolean parseLogicalORExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseLogicalANDExpression(tree)) {
 +            return false;
 +        }
 +        while ((lexer.peekToken()) == TokenType.LEX_LOGICAL_OR) {
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            lexer.consumeToken();
 +            parseLogicalANDExpression(treeR);
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(Operation.OpKind_LOGICAL_OR_OP, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +        }
 +        return true;
 +    }
 +
 +    // LogicalANDExpression .= InclusiveORExpression
 +    // | LogicalANDExpression '&&' InclusiveORExpression
 +    private boolean parseLogicalANDExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseInclusiveORExpression(tree)) {
 +            return false;
 +        }
 +        while ((lexer.peekToken()) == TokenType.LEX_LOGICAL_AND) {
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            lexer.consumeToken();
 +            parseInclusiveORExpression(treeR);
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(Operation.OpKind_LOGICAL_AND_OP, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +        }
 +        return true;
 +    }
 +
 +    // InclusiveORExpression .= ExclusiveORExpression
 +    // | InclusiveORExpression '|' ExclusiveORExpression
 +    public boolean parseInclusiveORExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseExclusiveORExpression(tree)) {
 +            return false;
 +        }
 +        while ((lexer.peekToken()) == TokenType.LEX_BITWISE_OR) {
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            lexer.consumeToken();
 +            parseExclusiveORExpression(treeR);
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(Operation.OpKind_BITWISE_OR_OP, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +        }
 +        return true;
 +    }
 +
 +    // ExclusiveORExpression .= ANDExpression
 +    // | ExclusiveORExpression '^' ANDExpression
 +    private boolean parseExclusiveORExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseANDExpression(tree)) {
 +            return false;
 +        }
 +        while ((lexer.peekToken()) == TokenType.LEX_BITWISE_XOR) {
 +            lexer.consumeToken();
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            parseANDExpression(treeR);
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(Operation.OpKind_BITWISE_XOR_OP, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +        }
 +        return true;
 +    }
 +
 +    // ANDExpression .= EqualityExpression
 +    // | ANDExpression '&' EqualityExpression
 +    private boolean parseANDExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseEqualityExpression(tree)) {
 +            return false;
 +        }
 +        while ((lexer.peekToken()) == TokenType.LEX_BITWISE_AND) {
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            lexer.consumeToken();
 +            parseEqualityExpression(treeR);
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(Operation.OpKind_BITWISE_AND_OP, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +        }
 +        return true;
 +    }
 +
 +    // EqualityExpression .= RelationalExpression
 +    // | EqualityExpression '==' RelationalExpression
 +    // | EqualityExpression '!=' RelationalExpression
 +    // | EqualityExpression '=?=' RelationalExpression
 +    // | EqualityExpression '=!=' RelationalExpression
 +    // Folds a left-associative chain of (meta-)equality operators into
 +    // pooled Operation nodes. Returns false and clears 'tree' when either
 +    // operand fails to parse.
 +    private boolean parseEqualityExpression(ExprTreeHolder tree) throws IOException {
 +        TokenType tt;
 +        int op = Operation.OpKind_NO_OP;
 +        if (!parseRelationalExpression(tree)) {
 +            return false;
 +        }
 +        tt = lexer.peekToken();
 +        while (tt == TokenType.LEX_EQUAL || tt == TokenType.LEX_NOT_EQUAL || tt == TokenType.LEX_META_EQUAL
 +                || tt == TokenType.LEX_META_NOT_EQUAL) {
 +            // 'tree' holds everything parsed so far; it becomes the left operand
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            lexer.consumeToken();
 +            parseRelationalExpression(treeR);
 +            switch (tt) {
 +                case LEX_EQUAL:
 +                    op = Operation.OpKind_EQUAL_OP;
 +                    break;
 +                case LEX_NOT_EQUAL:
 +                    op = Operation.OpKind_NOT_EQUAL_OP;
 +                    break;
 +                case LEX_META_EQUAL:
 +                    op = Operation.OpKind_META_EQUAL_OP;
 +                    break;
 +                case LEX_META_NOT_EQUAL:
 +                    op = Operation.OpKind_META_NOT_EQUAL_OP;
 +                    break;
 +                default:
 +                    // unreachable: the loop guard only admits the four tokens above
 +                    throw new HyracksDataException("ClassAd:  Should not reach here");
 +            }
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(op, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                // a missing operand aborts the whole expression
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +            tt = lexer.peekToken();
 +        }
 +        return true;
 +    }
 +
 +    // RelationalExpression .= ShiftExpression
 +    // | RelationalExpression '<' ShiftExpression
 +    // | RelationalExpression '>' ShiftExpression
 +    // | RelationalExpression '<=' ShiftExpression
 +    // | RelationalExpression '>=' ShiftExpression
 +    // Folds a left-associative chain of comparison operators into pooled
 +    // Operation nodes. Returns false and clears 'tree' when either operand
 +    // fails to parse.
 +    private boolean parseRelationalExpression(ExprTreeHolder tree) throws IOException {
 +        TokenType tt;
 +        if (!parseShiftExpression(tree)) {
 +            return false;
 +        }
 +        tt = lexer.peekToken();
 +        while (tt == TokenType.LEX_LESS_THAN || tt == TokenType.LEX_GREATER_THAN || tt == TokenType.LEX_LESS_OR_EQUAL
 +                || tt == TokenType.LEX_GREATER_OR_EQUAL) {
 +            int op = Operation.OpKind_NO_OP;
 +            // 'tree' holds everything parsed so far; it becomes the left operand
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            lexer.consumeToken();
 +            parseShiftExpression(treeR);
 +            switch (tt) {
 +                case LEX_LESS_THAN:
 +                    op = Operation.OpKind_LESS_THAN_OP;
 +                    break;
 +                case LEX_LESS_OR_EQUAL:
 +                    op = Operation.OpKind_LESS_OR_EQUAL_OP;
 +                    break;
 +                case LEX_GREATER_THAN:
 +                    op = Operation.OpKind_GREATER_THAN_OP;
 +                    break;
 +                case LEX_GREATER_OR_EQUAL:
 +                    op = Operation.OpKind_GREATER_OR_EQUAL_OP;
 +                    break;
 +                default:
 +                    // unreachable: the loop guard only admits the four tokens above
 +                    throw new HyracksDataException("ClassAd:  Should not reach here");
 +            }
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(op, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                // a missing operand aborts the whole expression
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +            tt = lexer.peekToken();
 +        }
 +        return true;
 +    }
 +
 +    // ShiftExpression .= AdditiveExpression
 +    // | ShiftExpression '<<' AdditiveExpression
 +    // | ShiftExpression '>>' AdditiveExpression
 +    // | ShiftExpression '>>>' AdditiveExpression
 +    // Folds a left-associative chain of shift operators into pooled
 +    // Operation nodes. Returns false and clears 'tree' when either operand
 +    // fails to parse.
 +    private boolean parseShiftExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseAdditiveExpression(tree)) {
 +            return false;
 +        }
 +
 +        TokenType tt = lexer.peekToken();
 +        while (tt == TokenType.LEX_LEFT_SHIFT || tt == TokenType.LEX_RIGHT_SHIFT || tt == TokenType.LEX_URIGHT_SHIFT) {
 +            // 'tree' holds everything parsed so far; it becomes the left operand
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            int op;
 +            lexer.consumeToken();
 +            parseAdditiveExpression(treeR);
 +            switch (tt) {
 +                case LEX_LEFT_SHIFT:
 +                    op = Operation.OpKind_LEFT_SHIFT_OP;
 +                    break;
 +                case LEX_RIGHT_SHIFT:
 +                    op = Operation.OpKind_RIGHT_SHIFT_OP;
 +                    break;
 +                case LEX_URIGHT_SHIFT:
 +                    op = Operation.OpKind_URIGHT_SHIFT_OP;
 +                    break;
 +                default:
 +                    // unreachable: the loop guard only admits the three shift tokens;
 +                    // the dead "make gcc happy" assignment from the C++ port was removed
 +                    throw new HyracksDataException("ClassAd:  Should not reach here");
 +            }
 +
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(op, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                // a missing operand aborts the whole expression
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +            tt = lexer.peekToken();
 +        }
 +        return true;
 +    }
 +
 +    // AdditiveExpression .= MultiplicativeExpression
 +    // | AdditiveExpression '+' MultiplicativeExpression
 +    // | AdditiveExpression '-' MultiplicativeExpression
 +    // Folds a left-associative chain of '+'/'-' terms into pooled Operation
 +    // nodes. Returns false and clears 'tree' when either operand fails.
 +    private boolean parseAdditiveExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseMultiplicativeExpression(tree)) {
 +            return false;
 +        }
 +
 +        TokenType next = lexer.peekToken();
 +        while (next == TokenType.LEX_PLUS || next == TokenType.LEX_MINUS) {
 +            // 'tree' holds everything parsed so far; it becomes the left operand
 +            ExprTreeHolder lhs = tree;
 +            ExprTreeHolder rhs = objectPool.mutableExprPool.get();
 +            lexer.consumeToken();
 +            parseMultiplicativeExpression(rhs);
 +            if (lhs.getInnerTree() == null || rhs.getInnerTree() == null) {
 +                // a missing operand aborts the whole expression
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +            int op;
 +            if (next == TokenType.LEX_PLUS) {
 +                op = Operation.OpKind_ADDITION_OP;
 +            } else {
 +                op = Operation.OpKind_SUBTRACTION_OP;
 +            }
 +            Operation combined = objectPool.operationPool.get();
 +            Operation.createOperation(op, lhs, rhs, null, combined);
 +            tree.setInnerTree(combined);
 +            next = lexer.peekToken();
 +        }
 +        return true;
 +    }
 +
 +    // MultiplicativeExpression .= UnaryExpression
 +    // | MultiplicativeExpression '*' UnaryExpression
 +    // | MultiplicativeExpression '/' UnaryExpression
 +    // | MultiplicativeExpression '%' UnaryExpression
 +    // Folds a left-associative chain of '*', '/', '%' into pooled Operation
 +    // nodes. Returns false and clears 'tree' when either operand fails.
 +    private boolean parseMultiplicativeExpression(ExprTreeHolder tree) throws IOException {
 +        if (!parseUnaryExpression(tree)) {
 +            return false;
 +        }
 +
 +        TokenType tt = lexer.peekToken();
 +        while (tt == TokenType.LEX_MULTIPLY || tt == TokenType.LEX_DIVIDE || tt == TokenType.LEX_MODULUS) {
 +            // 'tree' holds everything parsed so far; it becomes the left operand
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            int op;
 +            lexer.consumeToken();
 +            parseUnaryExpression(treeR);
 +            switch (tt) {
 +                case LEX_MULTIPLY:
 +                    op = Operation.OpKind_MULTIPLICATION_OP;
 +                    break;
 +                case LEX_DIVIDE:
 +                    op = Operation.OpKind_DIVISION_OP;
 +                    break;
 +                case LEX_MODULUS:
 +                    op = Operation.OpKind_MODULUS_OP;
 +                    break;
 +                default:
 +                    // unreachable: the loop guard only admits the three tokens above;
 +                    // the dead "make gcc happy" assignment from the C++ port was removed
 +                    throw new HyracksDataException("ClassAd:  Should not reach here");
 +            }
 +            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                Operation newTree = objectPool.operationPool.get();
 +                Operation.createOperation(op, treeL, treeR, null, newTree);
 +                tree.setInnerTree(newTree);
 +            } else {
 +                // a missing operand aborts the whole expression
 +                tree.setInnerTree(null);
 +                return false;
 +            }
 +            tt = lexer.peekToken();
 +        }
 +        return true;
 +    }
 +
 +    // UnaryExpression .= PostfixExpression
 +    // | UnaryOperator UnaryExpression
 +    // ( where UnaryOperator is one of { -, +, ~, ! } )
 +    // Either delegates straight to parsePostfixExpression, or consumes one
 +    // unary operator and recurses, wrapping the operand in an Operation node.
 +    private boolean parseUnaryExpression(ExprTreeHolder tree) throws IOException {
 +        TokenType next = lexer.peekToken();
 +        boolean isUnaryOp = next == TokenType.LEX_MINUS || next == TokenType.LEX_PLUS
 +                || next == TokenType.LEX_BITWISE_NOT || next == TokenType.LEX_LOGICAL_NOT;
 +        if (!isUnaryOp) {
 +            return parsePostfixExpression(tree);
 +        }
 +        lexer.consumeToken();
 +        ExprTreeHolder operand = objectPool.mutableExprPool.get();
 +        parseUnaryExpression(operand);
 +        int op;
 +        switch (next) {
 +            case LEX_MINUS:
 +                op = Operation.OpKind_UNARY_MINUS_OP;
 +                break;
 +            case LEX_PLUS:
 +                op = Operation.OpKind_UNARY_PLUS_OP;
 +                break;
 +            case LEX_BITWISE_NOT:
 +                op = Operation.OpKind_BITWISE_NOT_OP;
 +                break;
 +            case LEX_LOGICAL_NOT:
 +                op = Operation.OpKind_LOGICAL_NOT_OP;
 +                break;
 +            default:
 +                // unreachable: guarded by isUnaryOp above
 +                throw new HyracksDataException("ClassAd: Shouldn't Get here");
 +        }
 +        if (operand.getInnerTree() == null) {
 +            // missing operand aborts the expression
 +            tree.setInnerTree(null);
 +            return false;
 +        }
 +        Operation wrapped = objectPool.operationPool.get();
 +        Operation.createOperation(op, operand, null, null, wrapped);
 +        tree.setInnerTree(wrapped);
 +        return true;
 +    }
 +
 +    // PostfixExpression .= PrimaryExpression
 +    // | PostfixExpression '.' Identifier
 +    // | PostfixExpression '[' Expression ']'
 +    // Parses a primary expression followed by any number of subscript
 +    // ('[expr]') or field-selection ('.ident') suffixes, folding each suffix
 +    // into a new node whose left child is the expression parsed so far.
 +    private boolean parsePostfixExpression(ExprTreeHolder tree) throws IOException {
 +        TokenType tt;
 +        if (!parsePrimaryExpression(tree)) {
 +            return false;
 +        }
 +        while ((tt = lexer.peekToken()) == TokenType.LEX_OPEN_BOX || tt == TokenType.LEX_SELECTION) {
 +            ExprTreeHolder treeL = tree;
 +            ExprTreeHolder treeR = objectPool.mutableExprPool.get();
 +            TokenValue tv = objectPool.tokenValuePool.get();
 +            lexer.consumeToken();
 +            if (tt == TokenType.LEX_OPEN_BOX) {
 +                // subscript operation: expr '[' index-expr ']'
 +                parseExpression(treeR);
 +                if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
 +                    Operation newTree = objectPool.operationPool.get();
 +                    Operation.createOperation(Operation.OpKind_SUBSCRIPT_OP, treeL, treeR, null, newTree);
 +                    // the subscript must be terminated by ']'
 +                    if (lexer.consumeToken() == TokenType.LEX_CLOSE_BOX) {
 +                        tree.setInnerTree(newTree);
 +                        continue;
 +                    }
 +                }
 +                // missing operand or unterminated subscript
 +                tree.setInnerTree(null);
 +                return false;
 +            } else if (tt == TokenType.LEX_SELECTION) {
 +                // field selection operation: expr '.' identifier
 +                if ((tt = lexer.consumeToken(tv)) != TokenType.LEX_IDENTIFIER) {
 +                    throw new HyracksDataException("second argument of selector must be an " + "identifier (got"
 +                            + String.valueOf(Lexer.strLexToken(tt)) + ")");
 +                }
 +                AttributeReference newTree = objectPool.attrRefPool.get();
 +                AttributeReference.createAttributeReference(treeL, tv.getStrValue(), false, newTree);
 +                tree.setInnerTree(newTree);
 +            }
 +        }
 +        return true;
 +    }
 +
 +    // PrimaryExpression .= Identifier
 +    // | FunctionCall
 +    // | '.' Identifier
 +    // | '(' Expression ')'
 +    // | Literal
 +    // FunctionCall .= Identifier ArgumentList
 +    // ( Constant may be
 +    // boolean,undefined,error,string,integer,real,classad,list )
 +    // ( ArgumentList non-terminal includes parentheses )
 +    // Dispatches on the next token; on success sets 'tree' to the parsed
 +    // node and returns true, otherwise clears 'tree' and returns false.
 +    private boolean parsePrimaryExpression(ExprTreeHolder tree) throws IOException {
 +        ExprTreeHolder treeL;
 +        TokenValue tv = objectPool.tokenValuePool.get();
 +        TokenType tt;
 +        tree.setInnerTree(null);
 +        switch ((tt = lexer.peekToken(tv))) {
 +            // identifiers
 +            case LEX_IDENTIFIER:
 +                isExpr = true;
 +                lexer.consumeToken();
 +                // check for function call
 +                if ((tt = lexer.peekToken()) == TokenType.LEX_OPEN_PAREN) {
 +                    ExprList argList = objectPool.exprListPool.get();
 +                    if (!parseArgumentList(argList)) {
 +                        tree.setInnerTree(null);
 +                        return false;
 +                    }
 +                    // special case function-calls should be converted
 +                    // into a literal expression if the argument is a
 +                    // string literal
 +                    if (shouldEvaluateAtParseTime(tv.getStrValue().toString(), argList)) {
 +                        tree.setInnerTree(evaluateFunction(tv.getStrValue().toString(), argList));
 +                    } else {
 +                        tree.setInnerTree(
 +                                FunctionCall.createFunctionCall(tv.getStrValue().toString(), argList, objectPool));
 +                    }
 +                } else {
 +                    // bare attribute reference (original note: possibly never hit)
 +                    tree.setInnerTree(
 +                            AttributeReference.createAttributeReference(null, tv.getStrValue(), false, objectPool));
 +                }
 +                return (tree.getInnerTree() != null);
 +            case LEX_SELECTION:
 +                isExpr = true;
 +                lexer.consumeToken();
 +                if ((tt = lexer.consumeToken(tv)) == TokenType.LEX_IDENTIFIER) {
 +                    // the boolean final arg signifies that reference is absolute
 +                    tree.setInnerTree(
 +                            AttributeReference.createAttributeReference(null, tv.getStrValue(), true, objectPool));
 +                    return (tree.size() != 0);
 +                }
 +                // not an identifier following the '.'
 +                throw new HyracksDataException(
 +                        "need identifier in selection expression (got " + Lexer.strLexToken(tt) + ")");
 +                // parenthesized expression
 +            case LEX_OPEN_PAREN: {
 +                isExpr = true;
 +                lexer.consumeToken();
 +                treeL = objectPool.mutableExprPool.get();
 +                parseExpression(treeL);
 +                if (treeL.getInnerTree() == null) {
 +                    tree.resetExprTree(null);
 +                    return false;
 +                }
 +
 +                if ((tt = lexer.consumeToken()) != TokenType.LEX_CLOSE_PAREN) {
 +                    throw new HyracksDataException("expected LEX_CLOSE_PAREN, but got " + Lexer.strLexToken(tt));
 +                }
 +                // assume make operation will return a new tree
 +                tree.setInnerTree(Operation.createOperation(Operation.OpKind_PARENTHESES_OP, treeL, objectPool));
 +                return (tree.size() != 0);
 +            }
 +            // constants
 +            case LEX_OPEN_BOX: {
 +                // nested classad: '[' ... ']'
 +                isExpr = true;
 +                ClassAd newAd = objectPool.classAdPool.get();
 +                if (!parseClassAd(newAd)) {
 +                    tree.resetExprTree(null);
 +                    return false;
 +                }
 +                tree.setInnerTree(newAd);
 +            }
 +                return true;
 +
 +            case LEX_OPEN_BRACE: {
 +                // expression list: '{' ... '}'
 +                isExpr = true;
 +                ExprList newList = objectPool.exprListPool.get();
 +                if (!parseExprList(newList)) {
 +                    tree.setInnerTree(null);
 +                    return false;
 +                }
 +                tree.setInnerTree(newList);
 +            }
 +                return true;
 +
 +            case LEX_UNDEFINED_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                lexer.consumeToken();
 +                val.setUndefinedValue();
 +                tree.setInnerTree(Literal.createLiteral(val, objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +            case LEX_ERROR_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                lexer.consumeToken();
 +                val.setErrorValue();
 +                tree.setInnerTree(Literal.createLiteral(val, objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +            case LEX_BOOLEAN_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                MutableBoolean b = new MutableBoolean();
 +                tv.getBoolValue(b);
 +                lexer.consumeToken();
 +                val.setBooleanValue(b);
 +                tree.setInnerTree(Literal.createLiteral(val, objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +
 +            case LEX_INTEGER_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                lexer.consumeToken();
 +                val.setIntegerValue(tv.getIntValue());
 +                tree.setInnerTree(Literal.createLiteral(val, tv.getFactor(), objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +
 +            case LEX_REAL_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                lexer.consumeToken();
 +                val.setRealValue(tv.getRealValue());
 +                tree.setInnerTree(Literal.createLiteral(val, tv.getFactor(), objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +
 +            case LEX_STRING_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                lexer.consumeToken();
 +                val.setStringValue(tv.getStrValue());
 +                tree.setInnerTree(Literal.createLiteral(val, objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +
 +            case LEX_ABSOLUTE_TIME_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                lexer.consumeToken();
 +                val.setAbsoluteTimeValue(tv.getTimeValue());
 +                tree.setInnerTree(Literal.createLiteral(val, objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +
 +            case LEX_RELATIVE_TIME_VALUE: {
 +                Value val = objectPool.valuePool.get();
 +                lexer.consumeToken();
 +                val.setRelativeTimeValue(tv.getTimeValue().getRelativeTime());
 +                tree.setInnerTree(Literal.createLiteral(val, objectPool));
 +                return (tree.getInnerTree() != null);
 +            }
 +
 +            default:
 +                // unrecognized token: leave 'tree' empty and report failure
 +                tree.setInnerTree(null);
 +                return false;
 +        }
 +    }
 +
 +    // ArgumentList .= '(' ListOfArguments ')'
 +    // ListOfArguments .= (epsilon)
 +    // | ListOfArguments ',' Expression
 +    // Parses a parenthesized, comma-separated argument list into 'argList'.
 +    // Throws on malformed delimiters; returns false only when an argument
 +    // expression itself fails to parse.
 +    public boolean parseArgumentList(ExprList argList) throws IOException {
 +        TokenType tt;
 +        argList.clear();
 +        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_PAREN) {
 +            throw new HyracksDataException("expected LEX_OPEN_PAREN but got " + String.valueOf(Lexer.strLexToken(tt)));
 +        }
 +        tt = lexer.peekToken();
 +        ExprTreeHolder tree = objectPool.mutableExprPool.get();
 +        while (tt != TokenType.LEX_CLOSE_PAREN) {
 +            // parse the next argument expression
 +            tree.reset();
 +            parseExpression(tree);
 +            if (tree.getInnerTree() == null) {
 +                argList.clear();
 +                return false;
 +            }
 +            // insert the expression into the argument list
 +            argList.add(tree.getInnerTree());
 +            // the next token must be a ',' or a ')'. (Old ClassAd semantics
 +            // also allowed ';' separators; that support was hard-disabled via
 +            // a '&& false' condition and has been removed.)
 +            tt = lexer.peekToken();
 +            if (tt == TokenType.LEX_COMMA) {
 +                lexer.consumeToken();
 +            } else if (tt != TokenType.LEX_CLOSE_PAREN) {
 +                argList.clear();
 +                throw new HyracksDataException(
 +                        "expected LEX_COMMA or LEX_CLOSE_PAREN but got " + String.valueOf(Lexer.strLexToken(tt)));
 +            }
 +        }
 +        lexer.consumeToken();
 +        return true;
 +    }
 +
 +    // ClassAd .= '[' AttributeList ']'
 +    // AttributeList .= (epsilon)
 +    // | Attribute ';' AttributeList
 +    // Attribute .= Identifier '=' Expression
 +    // Convenience overload: parses a classad without requiring that the
 +    // entire input be consumed.
 +    public boolean parseClassAd(ClassAd ad) throws IOException {
 +        return parseClassAd(ad, false);
 +    }
 +
 +    // Old-format (line-based) classad parsing is not implemented here; this
 +    // stub always reports failure. NOTE(review): old-format records appear to
 +    // be handled line-by-line in parse() via rootAd.insert — confirm before
 +    // relying on this method.
 +    public boolean parseClassAdOld(ClassAd ad, boolean full) throws IOException {
 +        return false;
 +    }
 +
 +    // Parses a bracketed classad ('[' Identifier '=' Expression ';' ... ']')
 +    // into 'ad'. Stray/extra semicolons between attributes are tolerated.
 +    // When 'full' is true, the input must be exhausted after the closing ']'.
 +    public boolean parseClassAd(ClassAd ad, boolean full) throws IOException {
 +        TokenType tt;
 +        ad.clear();
 +        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_BOX) {
 +            return false;
 +        }
 +        tt = lexer.peekToken();
 +        TokenValue tv = objectPool.tokenValuePool.get();
 +        ExprTreeHolder tree = objectPool.mutableExprPool.get();
 +        while (tt != TokenType.LEX_CLOSE_BOX) {
 +            // Get the name of the expression
 +            tv.reset();
 +            tree.reset();
 +            tt = lexer.consumeToken(tv);
 +            if (tt == TokenType.LEX_SEMICOLON) {
 +                // We allow empty expressions, so a double semicolon doesn't
 +                // hurt. Technically it's not right, but we shouldn't make users
 +                // pay the price for a meaningless mistake. See condor-support
 +                // #1881 for a user that was bitten by this.
 +                continue;
 +            }
 +            if (tt != TokenType.LEX_IDENTIFIER) {
 +                throw new HyracksDataException(
 +                        "while parsing classad:  expected LEX_IDENTIFIER " + " but got " + Lexer.strLexToken(tt));
 +            }
 +
 +            // consume the intermediate '='
 +            if ((tt = lexer.consumeToken()) != TokenType.LEX_BOUND_TO) {
 +                throw new HyracksDataException(
 +                        "while parsing classad:  expected LEX_BOUND_TO " + " but got " + Lexer.strLexToken(tt));
 +            }
 +
 +            isExpr = false;
 +            // parse the attribute's value expression
 +            parseExpression(tree);
 +            if (tree.getInnerTree() == null) {
 +                throw new HyracksDataException("parse expression returned empty tree");
 +            }
 +
 +            // insert the attribute into the classad
 +            if (!ad.insert(tv.getStrValue().toString(), tree)) {
 +                throw new HyracksDataException("Couldn't insert value to classad");
 +            }
 +
 +            // the next token must be a ';' or a ']'
 +            tt = lexer.peekToken();
 +            if (tt != TokenType.LEX_SEMICOLON && tt != TokenType.LEX_CLOSE_BOX) {
 +                throw new HyracksDataException("while parsing classad:  expected LEX_SEMICOLON or "
 +                        + "LEX_CLOSE_BOX but got " + Lexer.strLexToken(tt));
 +            }
 +
 +            // Slurp up any extra semicolons. This does not duplicate the work
 +            // at the top of the loop because it accounts for the case where
 +            // the last expression has extra semicolons, while the first case
 +            // accounts for optional beginning semicolons.
 +            while (tt == TokenType.LEX_SEMICOLON) {
 +                lexer.consumeToken();
 +                tt = lexer.peekToken();
 +            }
 +        }
 +        lexer.consumeToken();
 +        // if a full parse was requested, ensure that input is exhausted;
 +        // capture the offending token so the error reports it instead of
 +        // the stale ']' (bug fix: 'tt' previously held LEX_CLOSE_BOX here)
 +        if (full && ((tt = lexer.consumeToken()) != TokenType.LEX_END_OF_INPUT)) {
 +            throw new HyracksDataException("while parsing classad:  expected LEX_END_OF_INPUT for "
 +                    + "full parse but got " + Lexer.strLexToken(tt));
 +        }
 +        return true;
 +    }
 +
 +    // ExprList .= '{' ListOfExpressions '}'
 +    // ListOfExpressions .= (epsilon)
 +    // | Expression ',' ListOfExpressions
 +    // Convenience overload: parses an expression list without requiring that
 +    // the entire input be consumed.
 +    public boolean parseExprList(ExprList list) throws IOException {
 +        return parseExprList(list, false);
 +    }
 +
 +    // Parses a brace-delimited, comma-separated expression list into 'list'.
 +    // When 'full' is true, the input must be exhausted after the closing '}'.
 +    public boolean parseExprList(ExprList list, boolean full) throws IOException {
 +        TokenType tt;
 +        ExprTreeHolder tree = objectPool.mutableExprPool.get();
 +        ExprList loe = objectPool.exprListPool.get();
 +
 +        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_BRACE) {
 +            throw new HyracksDataException(
 +                    "while parsing expression list:  expected LEX_OPEN_BRACE" + " but got " + Lexer.strLexToken(tt));
 +        }
 +        tt = lexer.peekToken();
 +        while (tt != TokenType.LEX_CLOSE_BRACE) {
 +            // parse the next element expression
 +            parseExpression(tree);
 +            if (tree.getInnerTree() == null) {
 +                throw new HyracksDataException("while parsing expression list:  expected "
 +                        + "LEX_CLOSE_BRACE or LEX_COMMA but got " + Lexer.strLexToken(tt));
 +            }
 +
 +            // insert the expression into the list
 +            loe.add(tree);
 +
 +            // the next token must be a ',' or a '}'
 +            tt = lexer.peekToken();
 +            if (tt == TokenType.LEX_COMMA) {
 +                lexer.consumeToken();
 +            } else if (tt != TokenType.LEX_CLOSE_BRACE) {
 +                throw new HyracksDataException("while parsing expression list:  expected "
 +                        + "LEX_CLOSE_BRACE or LEX_COMMA but got " + Lexer.strLexToken(tt));
 +            }
 +        }
 +
 +        lexer.consumeToken();
 +        list.setValue(ExprList.createExprList(loe, objectPool));
 +
 +        // if a full parse was requested, ensure that input is exhausted;
 +        // capture the offending token so the error reports it instead of
 +        // the stale '}' (bug fix: 'tt' previously held LEX_CLOSE_BRACE here)
 +        if (full && ((tt = lexer.consumeToken()) != TokenType.LEX_END_OF_INPUT)) {
 +            list.clear();
 +            throw new HyracksDataException("while parsing expression list:  expected "
 +                    + "LEX_END_OF_INPUT for full parse but got " + Lexer.strLexToken(tt));
 +        }
 +        return true;
 +    }
 +
 +    // Returns true when a call to absTime()/relTime() has exactly one
 +    // argument that is a string literal, meaning the call can be folded into
 +    // a literal at parse time instead of building a FunctionCall node.
 +    public boolean shouldEvaluateAtParseTime(String functionName, ExprList argList) throws HyracksDataException {
 +        if (!functionName.equalsIgnoreCase("absTime") && !functionName.equalsIgnoreCase("relTime")) {
 +            return false;
 +        }
 +        if (argList.size() != 1 || argList.get(0).getKind() != NodeKind.LITERAL_NODE) {
 +            return false;
 +        }
 +        Value val = objectPool.valuePool.get();
 +        AMutableNumberFactor factor = objectPool.numFactorPool.get();
 +        ((Literal) argList.get(0)).getComponents(val, factor);
 +        return val.isStringValue();
 +    }
 +
 +    // Folds absTime()/relTime() calls whose single argument is a string
 +    // literal into a time literal; any other call is rebuilt as a
 +    // FunctionCall node. Assumes argList.get(0) is a Literal (the caller
 +    // checks this via shouldEvaluateAtParseTime).
 +    public ExprTree evaluateFunction(String functionName, ExprList argList) throws HyracksDataException {
 +        Value val = objectPool.valuePool.get();
 +        AMutableNumberFactor factor = objectPool.numFactorPool.get();
 +        ExprTreeHolder result = objectPool.mutableExprPool.get();
 +        ((Literal) argList.get(0)).getComponents(val, factor);
 +
 +        AMutableCharArrayString strValue = objectPool.strPool.get();
 +        if (!val.isStringValue(strValue)) {
 +            result.setInnerTree(FunctionCall.createFunctionCall(functionName, argList, objectPool));
 +        } else if (functionName.equalsIgnoreCase("absTime")) {
 +            result.setInnerTree(Literal.createAbsTime(strValue, objectPool));
 +        } else if (functionName.equalsIgnoreCase("relTime")) {
 +            result.setInnerTree(Literal.createRelTime(strValue, objectPool));
 +        } else {
 +            result.setInnerTree(FunctionCall.createFunctionCall(functionName, argList, objectPool));
 +        }
 +        return result;
 +    }
 +
 +    // Returns the next token without consuming it, or LEX_TOKEN_ERROR when
 +    // the lexer has not yet been initialized with a source.
 +    public TokenType peekToken() throws IOException {
 +        return lexer.wasInitialized() ? lexer.peekToken() : TokenType.LEX_TOKEN_ERROR;
 +    }
 +
 +    // Consumes and returns the next token, or LEX_TOKEN_ERROR when the lexer
 +    // has not yet been initialized with a source.
 +    public TokenType consumeToken() throws IOException {
 +        return lexer.wasInitialized() ? lexer.consumeToken() : TokenType.LEX_TOKEN_ERROR;
 +    }
 +
 +    // Convenience overload: parses 'buf' as an expression without requiring
 +    // that the entire input be consumed.
 +    public boolean parseExpression(String buf, ExprTreeHolder tree) throws IOException {
 +        return parseExpression(buf, tree, false);
 +    }
 +
 +    // Parses 'buf' as a ClassAd expression into 'tree'. When 'full' is true
 +    // the whole input must be consumed. Returns false if the lexer cannot be
 +    // initialized from the string or the expression fails to parse.
 +    public boolean parseExpression(String buf, ExprTreeHolder tree, boolean full) throws IOException {
 +        StringLexerSource source = new StringLexerSource(buf);
 +        if (!lexer.initialize(source)) {
 +            return false;
 +        }
 +        return parseExpression(tree, full);
 +    }
 +
 +    // Convenience overload: parses 'input_basic' as a classad without
 +    // requiring that the entire input be consumed.
 +    public ClassAd parseClassAd(String input_basic) throws IOException {
 +        return parseClassAd(input_basic, false);
 +    }
 +
 +    // Returns the lexer source currently used for incoming records.
 +    public LexerSource getLexerSource() {
 +        return currentSource;
 +    }
 +
 +    // Replaces the lexer source used for incoming records.
 +    public void setLexerSource(LexerSource lexerSource) {
 +        this.currentSource = lexerSource;
 +    }
 +
 +    // Dataflow entry point: parses one raw record into rootAd (line-based
 +    // old format, or bracketed new format via asterixParseClassAd) and then
 +    // writes the typed record to 'out' via parseRecord. Any failure is
 +    // rethrown wrapped in HyracksDataException.
 +    @Override
 +    public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws IOException {
 +        try {
 +            // recycle pooled objects from the previous record
 +            resetPools();
 +            if (oldFormat) {
 +                int maxOffset = record.size();
 +                rootAd.clear();
 +                char[] buffer = record.get();
 +                aInt32.setValue(0); // running read offset into 'buffer'
 +                String line = readLine(buffer, aInt32, maxOffset);
 +                while (line != null) {
 +                    if (line.trim().length() == 0) {
 +                        // blank lines before any content are skipped; a blank
 +                        // line after content terminates the ad
 +                        if (rootAd.size() == 0) {
 +                            line = readLine(buffer, aInt32, maxOffset);
 +                            continue;
 +                        }
 +                        break;
 +                    } else if (!rootAd.insert(line)) {
 +                        throw new HyracksDataException("Couldn't parse expression in line: " + line);
 +                    }
 +                    line = readLine(buffer, aInt32, maxOffset);
 +                }
 +            } else {
 +                currentSource.setNewSource(record.get());
 +                rootAd.reset();
 +                asterixParseClassAd(rootAd);
 +            }
 +            parseRecord(recordType, rootAd, out);
 +        } catch (Exception e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index 7da6389,0000000..d1a4532
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@@ -1,137 -1,0 +1,137 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.library.adapter;
 +
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.cluster.ClusterPartition;
 +import org.apache.asterix.common.config.IAsterixPropertiesProvider;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.IAdapterFactory;
 +import org.apache.asterix.external.api.IDataSourceAdapter;
 +import org.apache.asterix.external.api.IExternalDataSourceFactory;
 +import org.apache.asterix.external.api.ITupleForwarder;
 +import org.apache.asterix.external.parser.ADMDataParser;
 +import org.apache.asterix.external.util.DataflowUtils;
 +import org.apache.asterix.external.util.ExternalDataUtils;
 +import org.apache.asterix.external.util.FeedUtils;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.api.comm.IFrameWriter;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.control.nc.NodeControllerService;
 +import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 +import org.apache.hyracks.dataflow.std.file.ITupleParser;
 +import org.apache.hyracks.dataflow.std.file.ITupleParserFactory;
 +
 +public class TestTypedAdapterFactory implements IAdapterFactory {
 +
 +    private static final long serialVersionUID = 1L;
 +
 +    private ARecordType outputType;
 +
 +    public static final String KEY_NUM_OUTPUT_RECORDS = "num_output_records";
 +
 +    private Map<String, String> configuration;
 +
 +    private transient AlgebricksAbsolutePartitionConstraint clusterLocations;
 +
 +    @Override
 +    public String getAlias() {
 +        return "test_typed";
 +    }
 +
 +    @Override
 +    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
 +        clusterLocations = IExternalDataSourceFactory.getPartitionConstraints(clusterLocations, 1);
 +        return clusterLocations;
 +    }
 +
 +    @Override
 +    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
 +        final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
 +        final ITupleParserFactory tupleParserFactory = new ITupleParserFactory() {
 +            private static final long serialVersionUID = 1L;
 +
 +            @Override
 +            public ITupleParser createTupleParser(IHyracksTaskContext ctx) throws HyracksDataException {
 +                ADMDataParser parser;
 +                ITupleForwarder forwarder;
 +                ArrayTupleBuilder tb;
 +                IAsterixPropertiesProvider propertiesProvider = (IAsterixPropertiesProvider) ((NodeControllerService) ctx
 +                        .getJobletContext().getApplicationContext().getControllerService()).getApplicationContext()
 +                                .getApplicationObject();
-                 ClusterPartition[] nodePartitions = propertiesProvider.getMetadataProperties().getNodePartitions()
-                         .get(nodeId);
++                ClusterPartition nodePartition = propertiesProvider.getMetadataProperties().getNodePartitions()
++                        .get(nodeId)[0];
 +                try {
 +                    parser = new ADMDataParser(outputType, true);
 +                    forwarder = DataflowUtils
 +                            .getTupleForwarder(configuration,
 +                                    FeedUtils.getFeedLogManager(ctx,
 +                                            FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
-                                                     ExternalDataUtils.getFeedName(configuration), partition,
-                                                     nodePartitions)));
++                                                    ExternalDataUtils.getFeedName(configuration), nodeId,
++                                                    nodePartition)));
 +                    tb = new ArrayTupleBuilder(1);
 +                } catch (Exception e) {
 +                    throw new HyracksDataException(e);
 +                }
 +                return new ITupleParser() {
 +
 +                    @Override
 +                    public void parse(InputStream in, IFrameWriter writer) throws HyracksDataException {
 +                        try {
 +                            parser.setInputStream(in);
 +                            forwarder.initialize(ctx, writer);
 +                            while (true) {
 +                                tb.reset();
 +                                if (!parser.parse(tb.getDataOutput())) {
 +                                    break;
 +                                }
 +                                tb.addFieldEndOffset();
 +                                forwarder.addTuple(tb);
 +                            }
 +                            forwarder.close();
 +                        } catch (Exception e) {
 +                            throw new HyracksDataException(e);
 +                        }
 +                    }
 +                };
 +            }
 +        };
 +        try {
 +            return new TestTypedAdapter(tupleParserFactory, outputType, ctx, configuration, partition);
 +        } catch (IOException e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
 +    @Override
 +    public ARecordType getAdapterOutputType() {
 +        return outputType;
 +    }
 +
 +    @Override
 +    public void configure(Map<String, String> configuration, ARecordType outputType, ARecordType metaType) {
 +        this.configuration = configuration;
 +        this.outputType = outputType;
 +    }
 +}



[11/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm
----------------------------------------------------------------------
diff --cc asterixdb/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm
index cabaa67,0000000..f54dee9
mode 100644,000000..100644
--- a/asterixdb/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm
+++ b/asterixdb/asterix-installer/src/test/resources/integrationts/library/results/library-parsers/record-parser/record-parser.1.adm
@@@ -1,100 -1,0 +1,100 @@@
- { "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildI
 D: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatchTime": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "Reques
 tMemory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN
 _ResourceName)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcR
 un/dude14-surf-out-esr1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); null", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "sseric
 ksen@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "Block
 ReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRe
 moteHost": "slot1@e272.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
 ; null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpt
 s_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockReadKbyt
 es": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRemoteHost": 
 "slot1@e286.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); null"
 , "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkp
 ts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockReadKb
 ytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRemoteHost"
 : "slot1@e457.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); null
 ", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "
 MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_Jo
 bStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tota
 lTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMo
 nitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_T
 otalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tota
 lTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites"
 : 0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "D
 AGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO":
  true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Env
 ironment": "", "LastPublicClaimId": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "Block
 ReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRe
 moteHost": "slot1@e445.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTrans
 fer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "Block
 ReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRe
 moteHost": "slot1@e372.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTrans
 fer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine",
  "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_
 JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_To
 talTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_Self
 MonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19
 _TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_To
 talTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrite
 s": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.l
 og", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantR
 emoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -
 - 3", "Environment": "", "LastPublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts
 _RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockRea
 dKbytes": 26620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastR
 emoteHost": "slot1@c064.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTr
 ansfer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "Block
 ReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRe
 moteHost": "slot1@e423.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTrans
 fer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockR
 eadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRem
 oteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfe
 r ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockRead
 Kbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRemote
 Host": "slot1@e442.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer 
 ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582124.0#1446105525", "StatsLifetimeStarter": 24745, "JobStartDate": 1446107685, "SubmitEventNotes": "DAG Node: 323+323", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 13, "StartdPrincipal": "execute-side@matchsession/128.104.55.89", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107685, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 75000, "RemoteWallClockTime": 24748.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132433, "ResidentSetSize_RAW": 71248, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "Mi
 nHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 21145.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 118000, "BytesSent": 30560.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobS
 tarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalT
 imeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMoni
 torAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_Tot
 alTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalT
 imeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 
 0, "JobFinishedHookDone": 1446132434, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 314, "SpooledOutputFiles": "harvest.log,CURLTIME_3853266,ChtcWrapper323.out,AuditLog.323,simu_3_323.txt,323.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107686, "ExitBySignal": false, "LastMatchTime": 1446107685, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 1142, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 43788, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24748, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/323/proce
 ss.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24748.0d, "LastJobLeaseRenewal": 1446132433, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "323+323", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRemoteHost": "slot1@c070.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 175.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/323/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "
 WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132433, "StreamErr": false, "RecentBlockReadKbytes": 4224, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582124, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24748.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --uniq
 ue=323 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.89:32652>#1445371750#1302#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/323", "QDate": 1446105525, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine",
  "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_
 JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_To
 talTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_Self
 MonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19
 _TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_To
 talTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrite
 s": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.l
 og", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); null", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantR
 emoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); null", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -
 - 3", "Environment": "", "LastPublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133562, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_523030,ChtcWrapper333.out,AuditLog.333,simu_3_333.txt,harvest.log,333.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 ); null", "ExitCode": 0, "JobNotification": 0, "BlockR
 eadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25876, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/333/proc

<TRUNCATED>


[14/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
index 4eec348,0000000..ea5cc8f
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
@@@ -1,276 -1,0 +1,283 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.util;
 +
 +import java.io.File;
 +import java.io.IOException;
 +import java.nio.file.ClosedWatchServiceException;
 +import java.nio.file.FileSystems;
 +import java.nio.file.Files;
 +import java.nio.file.LinkOption;
 +import java.nio.file.Path;
 +import java.nio.file.StandardWatchEventKinds;
 +import java.nio.file.WatchEvent;
 +import java.nio.file.WatchEvent.Kind;
 +import java.nio.file.WatchKey;
 +import java.nio.file.WatchService;
 +import java.util.HashMap;
 +import java.util.Iterator;
 +import java.util.LinkedList;
++import java.util.List;
++import java.util.concurrent.locks.ReentrantLock;
 +
- import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.log4j.Level;
 +import org.apache.log4j.Logger;
 +
 +public class FileSystemWatcher {
 +
 +    private static final Logger LOGGER = Logger.getLogger(FileSystemWatcher.class.getName());
 +    private WatchService watcher;
 +    private final HashMap<WatchKey, Path> keys;
 +    private final LinkedList<File> files = new LinkedList<File>();
 +    private Iterator<File> it;
 +    private final String expression;
 +    private FeedLogManager logManager;
-     private final Path path;
++    private final List<Path> paths;
 +    private final boolean isFeed;
 +    private boolean done;
-     private File current;
-     private AbstractFeedDataFlowController controller;
 +    private final LinkedList<Path> dirs;
++    private final ReentrantLock lock = new ReentrantLock();
 +
-     public FileSystemWatcher(Path inputResource, String expression, boolean isFeed) {
++    public FileSystemWatcher(List<Path> inputResources, String expression, boolean isFeed) throws HyracksDataException {
++        this.isFeed = isFeed;
 +        this.keys = isFeed ? new HashMap<WatchKey, Path>() : null;
 +        this.expression = expression;
-         this.path = inputResource;
-         this.isFeed = isFeed;
++        this.paths = inputResources;
 +        this.dirs = new LinkedList<Path>();
++        if (!isFeed) {
++            init();
++        }
 +    }
 +
-     public void setFeedLogManager(FeedLogManager feedLogManager) {
-         this.logManager = feedLogManager;
++    public synchronized void setFeedLogManager(FeedLogManager feedLogManager) throws HyracksDataException {
++        if (logManager == null) {
++            this.logManager = feedLogManager;
++            init();
++        }
 +    }
 +
-     public void init() throws HyracksDataException {
++    public synchronized void init() throws HyracksDataException {
 +        try {
 +            dirs.clear();
-             LocalFileSystemUtils.traverse(files, path.toFile(), expression, dirs);
-             it = files.iterator();
-             if (isFeed) {
-                 keys.clear();
-                 if (watcher != null) {
-                     try {
-                         watcher.close();
-                     } catch (IOException e) {
-                         LOGGER.warn("Failed to close watcher service", e);
++            for (Path path : paths) {
++                LocalFileSystemUtils.traverse(files, path.toFile(), expression, dirs);
++                it = files.iterator();
++                if (isFeed) {
++                    keys.clear();
++                    if (watcher != null) {
++                        try {
++                            watcher.close();
++                        } catch (IOException e) {
++                            LOGGER.warn("Failed to close watcher service", e);
++                        }
++                    }
++                    watcher = FileSystems.getDefault().newWatchService();
++                    for (Path dirPath : dirs) {
++                        register(dirPath);
++                    }
++                    resume();
++                } else {
++                    if (files.isEmpty()) {
++                        throw new HyracksDataException(path + ": no files found");
 +                    }
 +                }
-                 watcher = FileSystems.getDefault().newWatchService();
-                 for (Path path : dirs) {
-                     register(path);
-                 }
-                 resume();
 +            }
 +        } catch (IOException e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
 +    /**
 +     * Register the given directory, and all its sub-directories, with the
 +     * WatchService.
 +     */
 +    private void register(Path dir) throws IOException {
 +        WatchKey key = dir.register(watcher, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE,
 +                StandardWatchEventKinds.ENTRY_MODIFY);
 +        keys.put(key, dir);
 +    }
 +
-     private void resume() throws IOException {
++    private synchronized void resume() throws IOException {
 +        if (logManager == null) {
 +            return;
 +        }
 +        /*
 +         * Done processing the progress log file. We now have:
 +         * the files that were completed.
 +         */
 +
 +        if (it == null) {
 +            return;
 +        }
 +        while (it.hasNext()) {
 +            File file = it.next();
 +            if (logManager.isSplitRead(file.getAbsolutePath())) {
 +                // File was read completely, remove it from the files list
 +                it.remove();
 +            }
 +        }
 +        // reset the iterator
 +        it = files.iterator();
 +    }
 +
 +    @SuppressWarnings("unchecked")
 +    static <T> WatchEvent<T> cast(WatchEvent<?> event) {
 +        return (WatchEvent<T>) event;
 +    }
 +
 +    private void handleEvents(WatchKey key) throws IOException {
 +        // get dir associated with the key
 +        Path dir = keys.get(key);
 +        if (dir == null) {
 +            // This should never happen
 +            if (LOGGER.isEnabledFor(Level.WARN)) {
 +                LOGGER.warn("WatchKey not recognized!!");
 +            }
 +            return;
 +        }
 +        for (WatchEvent<?> event : key.pollEvents()) {
 +            Kind<?> kind = event.kind();
-             // TODO: Do something about overflow events
 +            // An overflow event means that some events were dropped
 +            if (kind == StandardWatchEventKinds.OVERFLOW) {
 +                if (LOGGER.isEnabledFor(Level.WARN)) {
 +                    LOGGER.warn("Overflow event. Some events might have been missed");
 +                }
 +                // need to read and validate all files.
-                 //TODO: use btrees for all logs
 +                init();
 +                return;
 +            }
 +
 +            // Context for directory entry event is the file name of entry
 +            WatchEvent<Path> ev = cast(event);
 +            Path name = ev.context();
 +            Path child = dir.resolve(name);
 +            // if directory is created then register it and its sub-directories
 +            if ((kind == StandardWatchEventKinds.ENTRY_CREATE)) {
 +                try {
 +                    if (Files.isDirectory(child, LinkOption.NOFOLLOW_LINKS)) {
 +                        register(child);
 +                    } else {
 +                        // it is a file, add it to the files list.
 +                        LocalFileSystemUtils.validateAndAdd(child, expression, files);
 +                    }
 +                } catch (IOException e) {
 +                    if (LOGGER.isEnabledFor(Level.ERROR)) {
 +                        LOGGER.error(e);
 +                    }
 +                }
 +            }
 +        }
++        it = files.iterator();
 +    }
 +
-     public void close() throws IOException {
++    public synchronized void close() throws IOException {
 +        if (!done) {
 +            if (watcher != null) {
 +                watcher.close();
 +                watcher = null;
 +            }
-             if (logManager != null) {
-                 if (current != null) {
-                     logManager.startPartition(current.getAbsolutePath());
-                     logManager.endPartition();
-                 }
-                 logManager.close();
-                 current = null;
-             }
 +            done = true;
 +        }
 +    }
 +
-     public File next() throws IOException {
-         if ((current != null) && (logManager != null)) {
-             logManager.startPartition(current.getAbsolutePath());
-             logManager.endPartition();
-         }
-         current = it.next();
-         return current;
-     }
- 
-     private boolean endOfEvents(WatchKey key) {
-         // reset key and remove from set if directory no longer accessible
-         if (!key.reset()) {
-             keys.remove(key);
-             if (keys.isEmpty()) {
-                 return true;
-             }
-         }
-         return false;
-     }
- 
-     public boolean hasNext() throws IOException {
++    // poll is not blocking
++    public synchronized File poll() throws IOException {
 +        if (it.hasNext()) {
-             return true;
++            return it.next();
 +        }
 +        if (done || !isFeed) {
-             return false;
++            return null;
 +        }
 +        files.clear();
++        it = files.iterator();
 +        if (keys.isEmpty()) {
-             return false;
++            close();
++            return null;
 +        }
 +        // Read new Events (Polling first to add all available files)
 +        WatchKey key;
 +        key = watcher.poll();
 +        while (key != null) {
 +            handleEvents(key);
 +            if (endOfEvents(key)) {
 +                close();
-                 return false;
++                return null;
 +            }
 +            key = watcher.poll();
 +        }
-         // No file was found, wait for the filesystem to push events
-         if (controller != null) {
-             controller.flush();
++        return null;
++    }
++
++    // take is blocking
++    public synchronized File take() throws IOException {
++        File next = poll();
++        if (next != null) {
++            return next;
 +        }
-         while (files.isEmpty()) {
-             try {
-                 key = watcher.take();
-             } catch (InterruptedException x) {
-                 if (LOGGER.isEnabledFor(Level.WARN)) {
-                     LOGGER.warn("Feed Closed");
-                 }
-                 if (watcher == null) {
-                     return false;
-                 }
-                 continue;
-             } catch (ClosedWatchServiceException e) {
-                 if (LOGGER.isEnabledFor(Level.WARN)) {
-                     LOGGER.warn("The watcher has exited");
++        if (done || !isFeed) {
++            return null;
++        }
++        // No file was found, wait for the filesystem to push events
++        WatchKey key = null;
++        lock.lock();
++        try {
++            while (!it.hasNext()) {
++                try {
++                    key = watcher.take();
++                } catch (InterruptedException x) {
++                    if (LOGGER.isEnabledFor(Level.WARN)) {
++                        LOGGER.warn("Feed Closed");
++                    }
++                    if (watcher == null) {
++                        return null;
++                    }
++                    continue;
++                } catch (ClosedWatchServiceException e) {
++                    if (LOGGER.isEnabledFor(Level.WARN)) {
++                        LOGGER.warn("The watcher has exited");
++                    }
++                    if (watcher == null) {
++                        return null;
++                    }
++                    continue;
 +                }
-                 if (watcher == null) {
-                     return false;
++                handleEvents(key);
++                if (endOfEvents(key)) {
++                    return null;
 +                }
-                 continue;
-             }
-             handleEvents(key);
-             if (endOfEvents(key)) {
-                 return false;
 +            }
++        } finally {
++            lock.unlock();
 +        }
 +        // files were found, re-create the iterator and move it one step
-         it = files.iterator();
-         return it.hasNext();
++        return it.next();
 +    }
 +
-     public void setController(AbstractFeedDataFlowController controller) {
-         this.controller = controller;
++    private boolean endOfEvents(WatchKey key) {
++        // reset key and remove from set if directory no longer accessible
++        if (!key.reset()) {
++            keys.remove(key);
++            if (keys.isEmpty()) {
++                return true;
++            }
++        }
++        return false;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
index d6e9463,0000000..16dd1e9
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
@@@ -1,75 -1,0 +1,76 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.util;
 +
 +import java.io.File;
 +import java.io.IOException;
 +import java.nio.file.FileVisitResult;
 +import java.nio.file.Files;
 +import java.nio.file.LinkOption;
 +import java.nio.file.Path;
 +import java.nio.file.SimpleFileVisitor;
 +import java.nio.file.attribute.BasicFileAttributes;
 +import java.util.LinkedList;
 +import java.util.regex.Pattern;
 +
++import org.apache.hyracks.api.exceptions.HyracksDataException;
++
 +public class LocalFileSystemUtils {
 +
-     //TODO: replace this method by FileUtils.iterateFilesAndDirs(.)
 +    public static void traverse(final LinkedList<File> files, File root, final String expression,
 +            final LinkedList<Path> dirs) throws IOException {
-         if (!Files.exists(root.toPath())) {
-             return;
++        final Path path = root.toPath();
++        if (!Files.exists(path)) {
++            throw new HyracksDataException(path + ": path not found");
 +        }
-         if (!Files.isDirectory(root.toPath())) {
-             validateAndAdd(root.toPath(), expression, files);
++        if (!Files.isDirectory(path)) {
++            validateAndAdd(path, expression, files);
 +        }
-         //FileUtils.iterateFilesAndDirs(directory, fileFilter, dirFilter)
-         Files.walkFileTree(root.toPath(), new SimpleFileVisitor<Path>() {
++        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
 +            @Override
 +            public FileVisitResult preVisitDirectory(Path path, BasicFileAttributes attrs) throws IOException {
 +                if (!Files.exists(path, LinkOption.NOFOLLOW_LINKS)) {
 +                    return FileVisitResult.TERMINATE;
 +                }
 +                if (Files.isDirectory(path, LinkOption.NOFOLLOW_LINKS)) {
 +                    if (dirs != null) {
 +                        dirs.add(path);
 +                    }
 +                    //get immediate children files
 +                    File[] content = path.toFile().listFiles();
 +                    for (File file : content) {
 +                        if (!file.isDirectory()) {
 +                            validateAndAdd(file.toPath(), expression, files);
 +                        }
 +                    }
 +                } else {
 +                    // Path is a file, add to list of files if it matches the expression
 +                    validateAndAdd(path, expression, files);
 +                }
 +                return FileVisitResult.CONTINUE;
 +            }
 +        });
 +    }
 +
 +    public static void validateAndAdd(Path path, String expression, LinkedList<File> files) {
 +        if (expression == null || Pattern.matches(expression, path.toString())) {
 +            files.add(new File(path.toString()));
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
index d822310,0000000..493bd3b
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
@@@ -1,110 -1,0 +1,189 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.classad.test;
 +
++import java.io.File;
++import java.io.PrintStream;
++import java.nio.file.Files;
++import java.nio.file.Path;
 +import java.nio.file.Paths;
++import java.util.ArrayList;
++import java.util.List;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +
 +import org.apache.asterix.external.api.IRawRecord;
 +import org.apache.asterix.external.classad.CaseInsensitiveString;
 +import org.apache.asterix.external.classad.CharArrayLexerSource;
 +import org.apache.asterix.external.classad.ClassAd;
 +import org.apache.asterix.external.classad.ExprTree;
 +import org.apache.asterix.external.classad.Value;
 +import org.apache.asterix.external.classad.object.pool.ClassAdObjectPool;
 +import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReader;
 +import org.apache.asterix.external.input.stream.LocalFSInputStream;
 +import org.apache.asterix.external.library.ClassAdParser;
- import org.apache.hyracks.api.io.FileReference;
- import org.apache.hyracks.dataflow.std.file.FileSplit;
++import org.apache.asterix.external.util.FileSystemWatcher;
++import org.apache.asterix.formats.nontagged.AqlADMPrinterFactoryProvider;
++import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
++import org.apache.asterix.om.types.ARecordType;
++import org.apache.asterix.om.types.BuiltinType;
++import org.apache.asterix.om.types.IAType;
++import org.apache.commons.io.FileUtils;
++import org.apache.hyracks.algebricks.data.IPrinter;
++import org.apache.hyracks.algebricks.data.IPrinterFactory;
++import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
++import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
++import org.junit.Assert;
 +
 +import junit.framework.Test;
 +import junit.framework.TestCase;
 +import junit.framework.TestSuite;
 +
 +public class ClassAdToADMTest extends TestCase {
 +    /**
 +     * Create the test case
 +     *
 +     * @param testName
 +     *            name of the test case
 +     */
 +    public ClassAdToADMTest(String testName) {
 +        super(testName);
 +    }
 +
 +    /**
 +     * @return the suite of tests being tested
 +     */
 +    public static Test suite() {
 +        return new TestSuite(ClassAdToADMTest.class);
 +    }
 +
++    private void printTuple(ArrayTupleBuilder tb, IPrinter[] printers, PrintStream printStream)
++            throws HyracksDataException {
++        int[] offsets = tb.getFieldEndOffsets();
++        for (int i = 0; i < printers.length; i++) {
++            int offset = i == 0 ? 0 : offsets[i - 1];
++            int length = i == 0 ? offsets[0] : offsets[i] - offsets[i - 1];
++            printers[i].print(tb.getByteArray(), offset, length, printStream);
++            printStream.println();
++        }
++    }
++
++    @SuppressWarnings("rawtypes")
++    public void testSchemaful() {
++        try {
++            File file = new File("target/classad-wtih-temporals.adm");
++            File expected = new File(getClass().getResource("/results/classad-with-temporals.adm").toURI().getPath());
++            FileUtils.deleteQuietly(file);
++            PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
++            String[] recordFieldNames = { "GlobalJobId", "Owner", "ClusterId", "ProcId", "RemoteWallClockTime",
++                    "CompletionDate", "QDate", "JobCurrentStartDate", "JobStartDate", "JobCurrentStartExecutingDate" };
++            IAType[] recordFieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
++                    BuiltinType.AINT32, BuiltinType.ADURATION, BuiltinType.ADATETIME, BuiltinType.ADATETIME,
++                    BuiltinType.ADATETIME, BuiltinType.ADATETIME, BuiltinType.ADATETIME };
++            ARecordType recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
++            int numOfTupleFields = 1;
++            ISerializerDeserializer[] serdes = new ISerializerDeserializer[1];
++            serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
++            IPrinterFactory[] printerFactories = new IPrinterFactory[1];
++            printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
++            // create output descriptor
++            IPrinter[] printers = new IPrinter[printerFactories.length];
++            for (int i = 0; i < printerFactories.length; i++) {
++                printers[i] = printerFactories[i].createPrinter();
++            }
++            ClassAdObjectPool objectPool = new ClassAdObjectPool();
++            String[] files = new String[] { "/classad-with-temporals.classads" };
++            ClassAdParser parser = new ClassAdParser(recordType, false, false, false, null, null, null, objectPool);
++            ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
++            for (String path : files) {
++                List<Path> paths = new ArrayList<>();
++                paths.add(Paths.get(getClass().getResource(path).toURI()));
++                FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
++                LocalFSInputStream in = new LocalFSInputStream(watcher);
++                SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, "[", "]");
++                while (recordReader.hasNext()) {
++                    tb.reset();
++                    IRawRecord<char[]> record = recordReader.next();
++                    parser.parse(record, tb.getDataOutput());
++                    tb.addFieldEndOffset();
++                    printTuple(tb, printers, printStream);
++                }
++                recordReader.close();
++                printStream.close();
++                Assert.assertTrue(FileUtils.contentEquals(file, expected));
++            }
++        } catch (Throwable th) {
++            System.err.println("TEST FAILED");
++            th.printStackTrace();
++            Assert.assertTrue(false);
++        }
++        System.err.println("TEST PASSED");
++    }
++
 +    /**
 +     *
 +     */
-     public void test() {
++    public void testSchemaless() {
 +        try {
-             // test here
 +            ClassAdObjectPool objectPool = new ClassAdObjectPool();
 +            ClassAd pAd = new ClassAd(objectPool);
 +            String[] files = new String[] { "/jobads.txt" };
 +            ClassAdParser parser = new ClassAdParser(objectPool);
 +            CharArrayLexerSource lexerSource = new CharArrayLexerSource();
 +            for (String path : files) {
-                 LocalFSInputStream in = new LocalFSInputStream(
-                         new FileSplit[] { new FileSplit("",
-                                 new FileReference(Paths.get(getClass().getResource(path).toURI()).toFile())) },
-                         null, null, 0, null, false);
-                 SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, null, "[", "]");
++                List<Path> paths = new ArrayList<>();
++                paths.add(Paths.get(getClass().getResource(path).toURI()));
++                FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
++                LocalFSInputStream in = new LocalFSInputStream(watcher);
++                SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader(in, "[", "]");
 +                Value val = new Value(objectPool);
 +                while (recordReader.hasNext()) {
 +                    val.reset();
 +                    IRawRecord<char[]> record = recordReader.next();
 +                    lexerSource.setNewSource(record.get());
 +                    parser.setLexerSource(lexerSource);
 +                    parser.parseNext(pAd);
 +                    Map<CaseInsensitiveString, ExprTree> attrs = pAd.getAttrList();
 +                    for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
 +                        ExprTree tree = entry.getValue();
 +                        switch (tree.getKind()) {
 +                            case ATTRREF_NODE:
 +                            case CLASSAD_NODE:
 +                            case EXPR_ENVELOPE:
 +                            case EXPR_LIST_NODE:
 +                            case FN_CALL_NODE:
 +                            case OP_NODE:
 +                                break;
 +                            case LITERAL_NODE:
 +                                break;
 +                            default:
 +                                System.out.println("Something is wrong");
 +                                break;
 +                        }
 +                    }
 +                }
 +                recordReader.close();
 +            }
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +            assertTrue(false);
 +        }
 +    }
 +}


[42/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java
deleted file mode 100644
index 1bca7ac..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/CheckSql92AggregateVisitor.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.visitor;
-
-import java.util.List;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.base.ILangExpression;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.clause.LimitClause;
-import org.apache.asterix.lang.common.clause.OrderbyClause;
-import org.apache.asterix.lang.common.clause.WhereClause;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.FieldAccessor;
-import org.apache.asterix.lang.common.expression.FieldBinding;
-import org.apache.asterix.lang.common.expression.IfExpr;
-import org.apache.asterix.lang.common.expression.IndexAccessor;
-import org.apache.asterix.lang.common.expression.ListConstructor;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
-import org.apache.asterix.lang.common.expression.OperatorExpr;
-import org.apache.asterix.lang.common.expression.QuantifiedExpression;
-import org.apache.asterix.lang.common.expression.RecordConstructor;
-import org.apache.asterix.lang.common.expression.UnaryExpr;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.statement.FunctionDecl;
-import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.HavingClause;
-import org.apache.asterix.lang.sqlpp.clause.JoinClause;
-import org.apache.asterix.lang.sqlpp.clause.NestClause;
-import org.apache.asterix.lang.sqlpp.clause.Projection;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectElement;
-import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
-
-/**
- * This visitor checks if a language construct contains SQL-92 aggregates.
- */
-public class CheckSql92AggregateVisitor extends AbstractSqlppQueryExpressionVisitor<Boolean, ILangExpression> {
-
-    @Override
-    public Boolean visit(Query q, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(FunctionDecl fd, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(LiteralExpr l, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(VariableExpr v, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(ListConstructor lc, ILangExpression parentSelectBlock) throws AsterixException {
-        return visitExprList(lc.getExprList(), parentSelectBlock);
-    }
-
-    @Override
-    public Boolean visit(RecordConstructor rc, ILangExpression parentSelectBlock) throws AsterixException {
-        for (FieldBinding fieldBinding : rc.getFbList()) {
-            ILangExpression leftExpr = fieldBinding.getLeftExpr();
-            ILangExpression rightExpr = fieldBinding.getRightExpr();
-            if (leftExpr.accept(this, parentSelectBlock)) {
-                return true;
-            }
-            if (rightExpr.accept(this, parentSelectBlock)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public Boolean visit(OperatorExpr ifbo, ILangExpression parentSelectBlock) throws AsterixException {
-        return visitExprList(ifbo.getExprList(), parentSelectBlock);
-    }
-
-    @Override
-    public Boolean visit(FieldAccessor fa, ILangExpression parentSelectBlock) throws AsterixException {
-        return fa.getExpr().accept(this, parentSelectBlock);
-    }
-
-    @Override
-    public Boolean visit(IndexAccessor ia, ILangExpression parentSelectBlock) throws AsterixException {
-        return ia.getExpr().accept(this, parentSelectBlock);
-    }
-
-    @Override
-    public Boolean visit(IfExpr ifexpr, ILangExpression parentSelectBlock) throws AsterixException {
-        if (ifexpr.getCondExpr().accept(this, parentSelectBlock)) {
-            return true;
-        } else {
-            return ifexpr.getThenExpr().accept(this, parentSelectBlock)
-                    || ifexpr.getElseExpr().accept(this, parentSelectBlock);
-        }
-    }
-
-    @Override
-    public Boolean visit(QuantifiedExpression qe, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(UnaryExpr u, ILangExpression parentSelectBlock) throws AsterixException {
-        return u.getExpr().accept(this, parentSelectBlock);
-    }
-
-    @Override
-    public Boolean visit(CallExpr pf, ILangExpression parentSelectBlock) throws AsterixException {
-        FunctionSignature fs = pf.getFunctionSignature();
-        if (FunctionMapUtil.isSql92AggregateFunction(fs)) {
-            return true;
-        }
-        for (Expression parameter : pf.getExprList()) {
-            if (parameter.accept(this, parentSelectBlock)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public Boolean visit(LetClause lc, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(WhereClause wc, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(OrderbyClause oc, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(GroupbyClause gc, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(LimitClause lc, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(FromClause fromClause, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(FromTerm fromTerm, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(JoinClause joinClause, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(NestClause nestClause, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(Projection projection, ILangExpression parentSelectBlock) throws AsterixException {
-        return projection.getExpression().accept(this, parentSelectBlock);
-    }
-
-    @Override
-    public Boolean visit(SelectBlock selectBlock, ILangExpression parentSelectBlock) throws AsterixException {
-        return selectBlock.getSelectClause().accept(this, selectBlock);
-    }
-
-    @Override
-    public Boolean visit(SelectClause selectClause, ILangExpression parentSelectBlock) throws AsterixException {
-        if (selectClause.selectElement()) {
-            return selectClause.getSelectElement().accept(this, parentSelectBlock);
-        } else {
-            return selectClause.getSelectRegular().accept(this, parentSelectBlock);
-        }
-    }
-
-    @Override
-    public Boolean visit(SelectElement selectElement, ILangExpression parentSelectBlock) throws AsterixException {
-        return selectElement.getExpression().accept(this, parentSelectBlock);
-    }
-
-    @Override
-    public Boolean visit(SelectRegular selectRegular, ILangExpression parentSelectBlock) throws AsterixException {
-        for (Projection projection : selectRegular.getProjections()) {
-            if (projection.accept(this, parentSelectBlock)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-    @Override
-    public Boolean visit(SelectSetOperation selectSetOperation, ILangExpression parentSelectBlock)
-            throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(SelectExpression selectStatement, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(UnnestClause unnestClause, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    @Override
-    public Boolean visit(HavingClause havingClause, ILangExpression parentSelectBlock) throws AsterixException {
-        return false;
-    }
-
-    private Boolean visitExprList(List<Expression> exprs, ILangExpression parentSelectBlock) throws AsterixException {
-        for (Expression item : exprs) {
-            if (item.accept(this, parentSelectBlock)) {
-                return true;
-            }
-        }
-        return false;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java
deleted file mode 100644
index 2d891e0..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/DeepCopyVisitor.java
+++ /dev/null
@@ -1,415 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.visitor;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.base.ILangExpression;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.clause.LimitClause;
-import org.apache.asterix.lang.common.clause.OrderbyClause;
-import org.apache.asterix.lang.common.clause.WhereClause;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.FieldAccessor;
-import org.apache.asterix.lang.common.expression.FieldBinding;
-import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.IfExpr;
-import org.apache.asterix.lang.common.expression.IndexAccessor;
-import org.apache.asterix.lang.common.expression.ListConstructor;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
-import org.apache.asterix.lang.common.expression.OperatorExpr;
-import org.apache.asterix.lang.common.expression.QuantifiedExpression;
-import org.apache.asterix.lang.common.expression.RecordConstructor;
-import org.apache.asterix.lang.common.expression.UnaryExpr;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.statement.FunctionDecl;
-import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.lang.common.struct.QuantifiedPair;
-import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.HavingClause;
-import org.apache.asterix.lang.sqlpp.clause.JoinClause;
-import org.apache.asterix.lang.sqlpp.clause.NestClause;
-import org.apache.asterix.lang.sqlpp.clause.Projection;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectElement;
-import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-
-public class DeepCopyVisitor extends AbstractSqlppQueryExpressionVisitor<ILangExpression, Void> {
-
-    @Override
-    public FromClause visit(FromClause fromClause, Void arg) throws AsterixException {
-        List<FromTerm> fromTerms = new ArrayList<>();
-        for (FromTerm fromTerm : fromClause.getFromTerms()) {
-            fromTerms.add((FromTerm) fromTerm.accept(this, arg));
-        }
-        return new FromClause(fromTerms);
-    }
-
-    @Override
-    public FromTerm visit(FromTerm fromTerm, Void arg) throws AsterixException {
-        // Visit the left expression of a from term.
-        Expression fromExpr = (Expression) fromTerm.getLeftExpression().accept(this, arg);
-        VariableExpr fromVar = (VariableExpr) fromTerm.getLeftVariable().accept(this, arg);
-        VariableExpr positionVar = fromTerm.getPositionalVariable() == null ? null
-                : (VariableExpr) fromTerm.getPositionalVariable().accept(this, arg);
-
-        // Visits join/unnest/nest clauses.
-        List<AbstractBinaryCorrelateClause> correlateClauses = new ArrayList<>();
-        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
-            correlateClauses.add((AbstractBinaryCorrelateClause) correlateClause.accept(this, arg));
-        }
-        return new FromTerm(fromExpr, fromVar, positionVar, correlateClauses);
-    }
-
-    @Override
-    public JoinClause visit(JoinClause joinClause, Void arg) throws AsterixException {
-        Expression rightExpression = (Expression) joinClause.getRightExpression().accept(this, arg);
-        VariableExpr rightVar = (VariableExpr) joinClause.getRightVariable().accept(this, arg);
-        VariableExpr rightPositionVar = joinClause.getPositionalVariable() == null ? null
-                : (VariableExpr) joinClause.getPositionalVariable().accept(this, arg);
-        Expression conditionExpresion = (Expression) joinClause.getConditionExpression().accept(this, arg);
-        return new JoinClause(joinClause.getJoinType(), rightExpression, rightVar, rightPositionVar,
-                conditionExpresion);
-    }
-
-    @Override
-    public NestClause visit(NestClause nestClause, Void arg) throws AsterixException {
-        Expression rightExpression = (Expression) nestClause.getRightExpression().accept(this, arg);
-        VariableExpr rightVar = (VariableExpr) nestClause.getRightVariable().accept(this, arg);
-        VariableExpr rightPositionVar = nestClause.getPositionalVariable() == null ? null
-                : (VariableExpr) nestClause.getPositionalVariable().accept(this, arg);
-        Expression conditionExpresion = (Expression) nestClause.getConditionExpression().accept(this, arg);
-        return new NestClause(nestClause.getJoinType(), rightExpression, rightVar, rightPositionVar,
-                conditionExpresion);
-    }
-
-    @Override
-    public UnnestClause visit(UnnestClause unnestClause, Void arg) throws AsterixException {
-        Expression rightExpression = (Expression) unnestClause.getRightExpression().accept(this, arg);
-        VariableExpr rightVar = (VariableExpr) unnestClause.getRightVariable().accept(this, arg);
-        VariableExpr rightPositionVar = unnestClause.getPositionalVariable() == null ? null
-                : (VariableExpr) unnestClause.getPositionalVariable().accept(this, arg);
-        return new UnnestClause(unnestClause.getJoinType(), rightExpression, rightVar, rightPositionVar);
-    }
-
-    @Override
-    public Projection visit(Projection projection, Void arg) throws AsterixException {
-        return new Projection((Expression) projection.getExpression().accept(this, arg), projection.getName(),
-                projection.star(), projection.exprStar());
-    }
-
-    @Override
-    public SelectBlock visit(SelectBlock selectBlock, Void arg) throws AsterixException {
-        FromClause fromClause = null;
-        List<LetClause> letClauses = new ArrayList<>();
-        WhereClause whereClause = null;
-        GroupbyClause gbyClause = null;
-        List<LetClause> gbyLetClauses = new ArrayList<>();
-        HavingClause havingClause = null;
-        SelectClause selectCluase = null;
-        // Traverses the select block in the order of "from", "let"s, "where",
-        // "group by", "let"s, "having" and "select".
-        if (selectBlock.hasFromClause()) {
-            fromClause = (FromClause) selectBlock.getFromClause().accept(this, arg);
-        }
-        if (selectBlock.hasLetClauses()) {
-            List<LetClause> letList = selectBlock.getLetList();
-            for (LetClause letClause : letList) {
-                letClauses.add((LetClause) letClause.accept(this, arg));
-            }
-        }
-        if (selectBlock.hasWhereClause()) {
-            whereClause = (WhereClause) selectBlock.getWhereClause().accept(this, arg);
-        }
-        if (selectBlock.hasGroupbyClause()) {
-            gbyClause = (GroupbyClause) selectBlock.getGroupbyClause().accept(this, arg);
-        }
-        if (selectBlock.hasLetClausesAfterGroupby()) {
-            List<LetClause> letListAfterGby = selectBlock.getLetListAfterGroupby();
-            for (LetClause letClauseAfterGby : letListAfterGby) {
-                gbyLetClauses.add((LetClause) letClauseAfterGby.accept(this, arg));
-            }
-        }
-        if (selectBlock.hasHavingClause()) {
-            havingClause = (HavingClause) selectBlock.getHavingClause().accept(this, arg);
-        }
-        selectCluase = (SelectClause) selectBlock.getSelectClause().accept(this, arg);
-        return new SelectBlock(selectCluase, fromClause, letClauses, whereClause, gbyClause, gbyLetClauses,
-                havingClause);
-    }
-
-    @Override
-    public SelectClause visit(SelectClause selectClause, Void arg) throws AsterixException {
-        SelectElement selectElement = null;
-        SelectRegular selectRegular = null;
-        if (selectClause.selectElement()) {
-            selectElement = (SelectElement) selectClause.getSelectElement().accept(this, arg);
-        }
-        if (selectClause.selectRegular()) {
-            selectRegular = (SelectRegular) selectClause.getSelectRegular().accept(this, arg);
-        }
-        return new SelectClause(selectElement, selectRegular, selectClause.distinct());
-    }
-
-    @Override
-    public SelectElement visit(SelectElement selectElement, Void arg) throws AsterixException {
-        return new SelectElement((Expression) selectElement.getExpression().accept(this, arg));
-    }
-
-    @Override
-    public SelectRegular visit(SelectRegular selectRegular, Void arg) throws AsterixException {
-        List<Projection> projections = new ArrayList<>();
-        for (Projection projection : selectRegular.getProjections()) {
-            projections.add((Projection) projection.accept(this, arg));
-        }
-        return new SelectRegular(projections);
-    }
-
-    @Override
-    public SelectSetOperation visit(SelectSetOperation selectSetOperation, Void arg) throws AsterixException {
-        SetOperationInput leftInput = selectSetOperation.getLeftInput();
-        SetOperationInput newLeftInput = null;
-        if (leftInput.selectBlock()) {
-            newLeftInput = new SetOperationInput((SelectBlock) leftInput.accept(this, arg), null);
-        } else {
-            newLeftInput = new SetOperationInput(null, (SelectExpression) leftInput.accept(this, arg));
-        }
-        List<SetOperationRight> rightInputs = new ArrayList<>();
-        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
-            SetOperationInput newRightInput = null;
-            SetOperationInput setOpRightInput = right.getSetOperationRightInput();
-            if (setOpRightInput.selectBlock()) {
-                newRightInput = new SetOperationInput((SelectBlock) leftInput.accept(this, arg), null);
-            } else {
-                newRightInput = new SetOperationInput(null, (SelectExpression) leftInput.accept(this, arg));
-            }
-            rightInputs.add(new SetOperationRight(right.getSetOpType(), right.isSetSemantics(), newRightInput));
-        }
-        return new SelectSetOperation(newLeftInput, rightInputs);
-    }
-
-    @Override
-    public HavingClause visit(HavingClause havingClause, Void arg) throws AsterixException {
-        return new HavingClause((Expression) havingClause.getFilterExpression().accept(this, arg));
-    }
-
-    @Override
-    public Query visit(Query q, Void arg) throws AsterixException {
-        return new Query(q.isTopLevel(), (Expression) q.getBody().accept(this, arg), q.getVarCounter(),
-                q.getDataverses(), q.getDatasets());
-    }
-
-    @Override
-    public FunctionDecl visit(FunctionDecl fd, Void arg) throws AsterixException {
-        return new FunctionDecl(fd.getSignature(), fd.getParamList(), (Expression) fd.getFuncBody().accept(this, arg));
-    }
-
-    @Override
-    public WhereClause visit(WhereClause whereClause, Void arg) throws AsterixException {
-        return new WhereClause((Expression) whereClause.getWhereExpr().accept(this, arg));
-    }
-
-    @Override
-    public OrderbyClause visit(OrderbyClause oc, Void arg) throws AsterixException {
-        List<Expression> newOrderbyList = new ArrayList<Expression>();
-        for (Expression orderExpr : oc.getOrderbyList()) {
-            newOrderbyList.add((Expression) orderExpr.accept(this, arg));
-        }
-        return new OrderbyClause(newOrderbyList, oc.getModifierList());
-    }
-
-    @Override
-    public GroupbyClause visit(GroupbyClause gc, Void arg) throws AsterixException {
-        List<GbyVariableExpressionPair> gbyPairList = new ArrayList<>();
-        List<GbyVariableExpressionPair> decorPairList = new ArrayList<>();
-        List<VariableExpr> withVarList = new ArrayList<>();
-        VariableExpr groupVarExpr = null;
-        List<Pair<Expression, Identifier>> groupFieldList = new ArrayList<>();
-        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
-            gbyPairList.add(new GbyVariableExpressionPair((VariableExpr) gbyVarExpr.getVar().accept(this, arg),
-                    (Expression) gbyVarExpr.getExpr().accept(this, arg)));
-        }
-        for (GbyVariableExpressionPair gbyVarExpr : gc.getDecorPairList()) {
-            decorPairList.add(new GbyVariableExpressionPair((VariableExpr) gbyVarExpr.getVar().accept(this, arg),
-                    (Expression) gbyVarExpr.getExpr().accept(this, arg)));
-        }
-        for (VariableExpr withVar : gc.getWithVarList()) {
-            withVarList.add((VariableExpr) withVar.accept(this, arg));
-        }
-        if (gc.hasGroupVar()) {
-            groupVarExpr = (VariableExpr) gc.getGroupVar().accept(this, arg);
-        }
-        for (Pair<Expression, Identifier> field : gc.getGroupFieldList()) {
-            groupFieldList.add(new Pair<>((Expression) field.first.accept(this, arg), field.second));
-        }
-        return new GroupbyClause(gbyPairList, decorPairList, withVarList, groupVarExpr, groupFieldList,
-                gc.hasHashGroupByHint(), gc.isGroupAll());
-    }
-
-    @Override
-    public LimitClause visit(LimitClause limitClause, Void arg) throws AsterixException {
-        Expression limitExpr = (Expression) limitClause.getLimitExpr().accept(this, arg);
-        Expression offsetExpr = limitClause.hasOffset() ? (Expression) limitClause.getOffset().accept(this, arg) : null;
-        return new LimitClause(limitExpr, offsetExpr);
-    }
-
-    @Override
-    public LetClause visit(LetClause letClause, Void arg) throws AsterixException {
-        return new LetClause((VariableExpr) letClause.getVarExpr().accept(this, arg),
-                (Expression) letClause.getBindingExpr().accept(this, arg));
-    }
-
-    @Override
-    public SelectExpression visit(SelectExpression selectExpression, Void arg) throws AsterixException {
-        List<LetClause> lets = new ArrayList<>();
-        SelectSetOperation select = null;
-        OrderbyClause orderby = null;
-        LimitClause limit = null;
-
-        // visit let list
-        if (selectExpression.hasLetClauses()) {
-            for (LetClause letClause : selectExpression.getLetList()) {
-                lets.add((LetClause) letClause.accept(this, arg));
-            }
-        }
-
-        // visit the main select.
-        select = (SelectSetOperation) selectExpression.getSelectSetOperation().accept(this, arg);
-
-        // visit order by
-        if (selectExpression.hasOrderby()) {
-            List<Expression> orderExprs = new ArrayList<>();
-            for (Expression orderExpr : selectExpression.getOrderbyClause().getOrderbyList()) {
-                orderExprs.add((Expression) orderExpr.accept(this, arg));
-            }
-            orderby = new OrderbyClause(orderExprs, selectExpression.getOrderbyClause().getModifierList());
-        }
-
-        // visit limit
-        if (selectExpression.hasLimit()) {
-            limit = (LimitClause) selectExpression.getLimitClause().accept(this, arg);
-        }
-        return new SelectExpression(lets, select, orderby, limit, selectExpression.isSubquery());
-    }
-
-    @Override
-    public LiteralExpr visit(LiteralExpr l, Void arg) throws AsterixException {
-        return l;
-    }
-
-    @Override
-    public ListConstructor visit(ListConstructor lc, Void arg) throws AsterixException {
-        List<Expression> newExprList = new ArrayList<Expression>();
-        for (Expression expr : lc.getExprList()) {
-            newExprList.add((Expression) expr.accept(this, arg));
-        }
-        return new ListConstructor(lc.getType(), newExprList);
-    }
-
-    @Override
-    public RecordConstructor visit(RecordConstructor rc, Void arg) throws AsterixException {
-        List<FieldBinding> bindings = new ArrayList<>();
-        for (FieldBinding binding : rc.getFbList()) {
-            FieldBinding fb = new FieldBinding((Expression) binding.getLeftExpr().accept(this, arg),
-                    (Expression) binding.getRightExpr().accept(this, arg));
-            bindings.add(fb);
-        }
-        return new RecordConstructor(bindings);
-    }
-
-    @Override
-    public OperatorExpr visit(OperatorExpr operatorExpr, Void arg) throws AsterixException {
-        List<Expression> newExprList = new ArrayList<Expression>();
-        for (Expression expr : operatorExpr.getExprList()) {
-            newExprList.add((Expression) expr.accept(this, arg));
-        }
-        return new OperatorExpr(newExprList, operatorExpr.getExprBroadcastIdx(), operatorExpr.getOpList(),
-                operatorExpr.isCurrentop());
-    }
-
-    @Override
-    public IfExpr visit(IfExpr ifExpr, Void arg) throws AsterixException {
-        Expression conditionExpr = (Expression) ifExpr.getCondExpr().accept(this, arg);
-        Expression thenExpr = (Expression) ifExpr.getThenExpr().accept(this, arg);
-        Expression elseExpr = (Expression) ifExpr.getElseExpr().accept(this, arg);
-        return new IfExpr(conditionExpr, thenExpr, elseExpr);
-    }
-
-    @Override
-    public QuantifiedExpression visit(QuantifiedExpression qe, Void arg) throws AsterixException {
-        List<QuantifiedPair> quantifiedPairs = new ArrayList<>();
-        for (QuantifiedPair pair : qe.getQuantifiedList()) {
-            Expression expr = (Expression) pair.getExpr().accept(this, arg);
-            VariableExpr var = (VariableExpr) pair.getVarExpr().accept(this, arg);
-            quantifiedPairs.add(new QuantifiedPair(var, expr));
-        }
-        Expression condition = (Expression) qe.getSatisfiesExpr().accept(this, arg);
-        return new QuantifiedExpression(qe.getQuantifier(), quantifiedPairs, condition);
-    }
-
-    @Override
-    public CallExpr visit(CallExpr callExpr, Void arg) throws AsterixException {
-        List<Expression> newExprList = new ArrayList<Expression>();
-        for (Expression expr : callExpr.getExprList()) {
-            newExprList.add((Expression) expr.accept(this, arg));
-        }
-        return new CallExpr(callExpr.getFunctionSignature(), newExprList);
-    }
-
-    @Override
-    public VariableExpr visit(VariableExpr varExpr, Void arg) throws AsterixException {
-        return new VariableExpr(varExpr.getVar());
-    }
-
-    @Override
-    public UnaryExpr visit(UnaryExpr u, Void arg) throws AsterixException {
-        return new UnaryExpr(u.getSign(), (Expression) u.getExpr().accept(this, arg));
-    }
-
-    @Override
-    public FieldAccessor visit(FieldAccessor fa, Void arg) throws AsterixException {
-        return new FieldAccessor((Expression) fa.getExpr().accept(this, arg), fa.getIdent());
-    }
-
-    @Override
-    public Expression visit(IndexAccessor ia, Void arg) throws AsterixException {
-        Expression expr = (Expression) ia.getExpr().accept(this, arg);
-        Expression indexExpr = null;
-        if (ia.getIndexExpr() != null) {
-            indexExpr = ia.getIndexExpr();
-        }
-        return new IndexAccessor(expr, indexExpr);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java
deleted file mode 100644
index 6e70455..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/FreeVariableVisitor.java
+++ /dev/null
@@ -1,471 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.visitor;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Clause.ClauseType;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.clause.LimitClause;
-import org.apache.asterix.lang.common.clause.OrderbyClause;
-import org.apache.asterix.lang.common.clause.WhereClause;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.FieldAccessor;
-import org.apache.asterix.lang.common.expression.FieldBinding;
-import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.IfExpr;
-import org.apache.asterix.lang.common.expression.IndexAccessor;
-import org.apache.asterix.lang.common.expression.ListConstructor;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
-import org.apache.asterix.lang.common.expression.OperatorExpr;
-import org.apache.asterix.lang.common.expression.QuantifiedExpression;
-import org.apache.asterix.lang.common.expression.RecordConstructor;
-import org.apache.asterix.lang.common.expression.UnaryExpr;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.statement.FunctionDecl;
-import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.lang.common.struct.QuantifiedPair;
-import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.HavingClause;
-import org.apache.asterix.lang.sqlpp.clause.JoinClause;
-import org.apache.asterix.lang.sqlpp.clause.NestClause;
-import org.apache.asterix.lang.sqlpp.clause.Projection;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectElement;
-import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-
-public class FreeVariableVisitor extends AbstractSqlppQueryExpressionVisitor<Void, Collection<VariableExpr>> {
-
-    @Override
-    public Void visit(FromClause fromClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        Collection<VariableExpr> bindingVars = new HashSet<>();
-        for (FromTerm fromTerm : fromClause.getFromTerms()) {
-            Collection<VariableExpr> fromTermFreeVars = new HashSet<>();
-            fromTerm.accept(this, fromTermFreeVars);
-
-            // Since a right from term can refer to variables defined in a left from term,
-            // we remove binding variables from the free variables.
-            fromTermFreeVars.removeAll(bindingVars);
-
-            // Adds binding variables.
-            bindingVars.addAll(SqlppVariableUtil.getBindingVariables(fromTerm));
-
-            // Adds into freeVars.
-            freeVars.addAll(fromTermFreeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(FromTerm fromTerm, Collection<VariableExpr> freeVars) throws AsterixException {
-        // The encountered binding variables so far in the fromterm.
-        Collection<VariableExpr> bindingVariables = new HashSet<>();
-
-        // Visit the left expression of a from term.
-        fromTerm.getLeftExpression().accept(this, freeVars);
-
-        // Adds binding variables.
-        bindingVariables.add(fromTerm.getLeftVariable());
-        if (fromTerm.hasPositionalVariable()) {
-            bindingVariables.add(fromTerm.getPositionalVariable());
-        }
-
-        // Visits join/unnest/nest clauses.
-        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
-            Collection<VariableExpr> correlateFreeVars = new HashSet<>();
-            correlateClause.accept(this, correlateFreeVars);
-            if (correlateClause.getClauseType() != ClauseType.JOIN_CLAUSE) {
-                // Correlation is allowed if the clause is not a join clause,
-                // therefore we remove left-side binding variables for these cases.
-                correlateFreeVars.removeAll(bindingVariables);
-
-                // Adds binding variables.
-                bindingVariables.add(correlateClause.getRightVariable());
-                if (correlateClause.hasPositionalVariable()) {
-                    bindingVariables.add(correlateClause.getPositionalVariable());
-                }
-            }
-            freeVars.addAll(correlateFreeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(JoinClause joinClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        visitJoinAndNest(joinClause, joinClause.getConditionExpression(), freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(NestClause nestClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        visitJoinAndNest(nestClause, nestClause.getConditionExpression(), freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(UnnestClause unnestClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        unnestClause.getRightExpression().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(Projection projection, Collection<VariableExpr> freeVars) throws AsterixException {
-        projection.getExpression().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectBlock selectBlock, Collection<VariableExpr> freeVars) throws AsterixException {
-        Collection<VariableExpr> selectFreeVars = new HashSet<>();
-        Collection<VariableExpr> fromFreeVars = new HashSet<>();
-        Collection<VariableExpr> letsFreeVars = new HashSet<>();
-        Collection<VariableExpr> whereFreeVars = new HashSet<>();
-        Collection<VariableExpr> gbyFreeVars = new HashSet<>();
-        Collection<VariableExpr> gbyLetsFreeVars = new HashSet<>();
-
-        Collection<VariableExpr> fromBindingVars = SqlppVariableUtil.getBindingVariables(selectBlock.getFromClause());
-        Collection<VariableExpr> letsBindingVars = SqlppVariableUtil.getBindingVariables(selectBlock.getLetList());
-        Collection<VariableExpr> gbyBindingVars = SqlppVariableUtil.getBindingVariables(selectBlock.getGroupbyClause());
-        Collection<VariableExpr> gbyLetsBindingVars = SqlppVariableUtil
-                .getBindingVariables(selectBlock.getLetListAfterGroupby());
-
-        selectBlock.getSelectClause().accept(this, selectFreeVars);
-        // Removes group-by, from, let, and gby-let binding vars.
-        removeAllBindingVarsInSelectBlock(selectFreeVars, fromBindingVars, letsBindingVars, gbyLetsBindingVars);
-
-        if (selectBlock.hasFromClause()) {
-            selectBlock.getFromClause().accept(this, fromFreeVars);
-        }
-        if (selectBlock.hasLetClauses()) {
-            visitLetClauses(selectBlock.getLetList(), letsFreeVars);
-            letsFreeVars.removeAll(fromBindingVars);
-        }
-        if (selectBlock.hasWhereClause()) {
-            selectBlock.getWhereClause().accept(this, whereFreeVars);
-            whereFreeVars.removeAll(fromBindingVars);
-            whereFreeVars.removeAll(letsBindingVars);
-        }
-        if (selectBlock.hasGroupbyClause()) {
-            selectBlock.getGroupbyClause().accept(this, gbyFreeVars);
-            // Remove group-by and let binding vars.
-            gbyFreeVars.removeAll(fromBindingVars);
-            gbyFreeVars.removeAll(letsBindingVars);
-            if (selectBlock.hasLetClausesAfterGroupby()) {
-                visitLetClauses(selectBlock.getLetListAfterGroupby(), gbyLetsFreeVars);
-                gbyLetsFreeVars.removeAll(fromBindingVars);
-                gbyLetsFreeVars.removeAll(letsBindingVars);
-                gbyLetsFreeVars.removeAll(gbyBindingVars);
-            }
-            if (selectBlock.hasHavingClause()) {
-                selectBlock.getHavingClause().accept(this, selectFreeVars);
-                removeAllBindingVarsInSelectBlock(selectFreeVars, fromBindingVars, letsBindingVars, gbyLetsBindingVars);
-            }
-        }
-
-        // Removes all binding vars from <code>freeVars</code>, which contains the free
-        // vars in the order-by and limit.
-        removeAllBindingVarsInSelectBlock(freeVars, fromBindingVars, letsBindingVars, gbyLetsBindingVars);
-
-        // Adds all free vars.
-        freeVars.addAll(selectFreeVars);
-        freeVars.addAll(fromFreeVars);
-        freeVars.addAll(letsFreeVars);
-        freeVars.addAll(whereFreeVars);
-        freeVars.addAll(gbyFreeVars);
-        freeVars.addAll(gbyLetsFreeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectClause selectClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        if (selectClause.selectElement()) {
-            selectClause.getSelectElement().accept(this, freeVars);
-        }
-        if (selectClause.selectRegular()) {
-            selectClause.getSelectRegular().accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectElement selectElement, Collection<VariableExpr> freeVars) throws AsterixException {
-        selectElement.getExpression().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectRegular selectRegular, Collection<VariableExpr> freeVars) throws AsterixException {
-        for (Projection projection : selectRegular.getProjections()) {
-            projection.accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectSetOperation selectSetOperation, Collection<VariableExpr> freeVars)
-            throws AsterixException {
-        selectSetOperation.getLeftInput().accept(this, freeVars);
-        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
-            right.getSetOperationRightInput().accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(HavingClause havingClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        havingClause.getFilterExpression().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(Query q, Collection<VariableExpr> freeVars) throws AsterixException {
-        q.getBody().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(FunctionDecl fd, Collection<VariableExpr> freeVars) throws AsterixException {
-        fd.getFuncBody().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(WhereClause whereClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        whereClause.getWhereExpr().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(OrderbyClause oc, Collection<VariableExpr> freeVars) throws AsterixException {
-        for (Expression orderExpr : oc.getOrderbyList()) {
-            orderExpr.accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(GroupbyClause gc, Collection<VariableExpr> freeVars) throws AsterixException {
-        // Puts all group-by variables into the symbol set of the new scope.
-        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
-            gbyVarExpr.getExpr().accept(this, freeVars);
-        }
-        for (GbyVariableExpressionPair decorVarExpr : gc.getDecorPairList()) {
-            decorVarExpr.getExpr().accept(this, freeVars);
-        }
-        if (gc.hasGroupFieldList()) {
-            for (Pair<Expression, Identifier> groupField : gc.getGroupFieldList()) {
-                groupField.first.accept(this, freeVars);
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(LimitClause limitClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        limitClause.getLimitExpr().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(LetClause letClause, Collection<VariableExpr> freeVars) throws AsterixException {
-        letClause.getBindingExpr().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectExpression selectExpression, Collection<VariableExpr> freeVars) throws AsterixException {
-        Collection<VariableExpr> letsFreeVars = new HashSet<>();
-        Collection<VariableExpr> selectFreeVars = new HashSet<>();
-        visitLetClauses(selectExpression.getLetList(), letsFreeVars);
-
-        // visit order by
-        if (selectExpression.hasOrderby()) {
-            for (Expression orderExpr : selectExpression.getOrderbyClause().getOrderbyList()) {
-                orderExpr.accept(this, selectFreeVars);
-            }
-        }
-
-        // visit limit
-        if (selectExpression.hasLimit()) {
-            selectExpression.getLimitClause().accept(this, selectFreeVars);
-        }
-
-        // visit the main select
-        selectExpression.getSelectSetOperation().accept(this, selectFreeVars);
-
-        // Removed let binding variables.
-        selectFreeVars.removeAll(SqlppVariableUtil.getBindingVariables(selectExpression.getLetList()));
-        freeVars.addAll(letsFreeVars);
-        freeVars.addAll(selectFreeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(LiteralExpr l, Collection<VariableExpr> freeVars) throws AsterixException {
-        return null;
-    }
-
-    @Override
-    public Void visit(ListConstructor lc, Collection<VariableExpr> freeVars) throws AsterixException {
-        for (Expression expr : lc.getExprList()) {
-            expr.accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(RecordConstructor rc, Collection<VariableExpr> freeVars) throws AsterixException {
-        for (FieldBinding binding : rc.getFbList()) {
-            binding.getLeftExpr().accept(this, freeVars);
-            binding.getRightExpr().accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(OperatorExpr operatorExpr, Collection<VariableExpr> freeVars) throws AsterixException {
-        for (Expression expr : operatorExpr.getExprList()) {
-            expr.accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(IfExpr ifExpr, Collection<VariableExpr> freeVars) throws AsterixException {
-        ifExpr.getCondExpr().accept(this, freeVars);
-        ifExpr.getThenExpr().accept(this, freeVars);
-        ifExpr.getElseExpr().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(QuantifiedExpression qe, Collection<VariableExpr> freeVars) throws AsterixException {
-        for (QuantifiedPair pair : qe.getQuantifiedList()) {
-            pair.getExpr().accept(this, freeVars);
-        }
-        qe.getSatisfiesExpr().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(CallExpr callExpr, Collection<VariableExpr> freeVars) throws AsterixException {
-        for (Expression expr : callExpr.getExprList()) {
-            expr.accept(this, freeVars);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(VariableExpr varExpr, Collection<VariableExpr> freeVars) throws AsterixException {
-        freeVars.add(varExpr);
-        return null;
-    }
-
-    @Override
-    public Void visit(UnaryExpr u, Collection<VariableExpr> freeVars) throws AsterixException {
-        u.getExpr().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(FieldAccessor fa, Collection<VariableExpr> freeVars) throws AsterixException {
-        fa.getExpr().accept(this, freeVars);
-        return null;
-    }
-
-    @Override
-    public Void visit(IndexAccessor ia, Collection<VariableExpr> freeVars) throws AsterixException {
-        ia.getExpr().accept(this, freeVars);
-        if (ia.getIndexExpr() != null) {
-            ia.getIndexExpr();
-        }
-        return null;
-    }
-
-    private void visitLetClauses(List<LetClause> letClauses, Collection<VariableExpr> freeVars)
-            throws AsterixException {
-        if (letClauses == null || letClauses.isEmpty()) {
-            return;
-        }
-        Collection<VariableExpr> bindingVars = new HashSet<>();
-        for (LetClause letClause : letClauses) {
-            Collection<VariableExpr> letFreeVars = new HashSet<>();
-            letClause.accept(this, letFreeVars);
-
-            // Removes previous binding variables.
-            letFreeVars.removeAll(bindingVars);
-            freeVars.addAll(letFreeVars);
-
-            // Adds let binding variables into the binding variable collection.
-            bindingVars.add(letClause.getVarExpr());
-        }
-    }
-
-    private void visitJoinAndNest(AbstractBinaryCorrelateClause clause, Expression condition,
-            Collection<VariableExpr> freeVars) throws AsterixException {
-        clause.getRightExpression().accept(this, freeVars);
-        Collection<VariableExpr> conditionFreeVars = new HashSet<>();
-        condition.accept(this, freeVars);
-
-        // The condition expression can free binding variables defined in the join clause.
-        conditionFreeVars.remove(clause.getRightVariable());
-        if (clause.hasPositionalVariable()) {
-            conditionFreeVars.remove(clause.getPositionalVariable());
-        }
-        freeVars.addAll(conditionFreeVars);
-    }
-
-    /**
-     * Removes all binding variables defined in the select block for a free variable collection.
-     *
-     * @param freeVars,
-     *            free variables.
-     * @param fromBindingVars,
-     *            binding variables defined in the from clause of a select block.
-     * @param letsBindingVars,
-     *            binding variables defined in the let clauses of the select block.
-     * @param gbyLetsBindingVars,
-     *            binding variables defined in the let clauses after a group-by in the select block.
-     */
-    private void removeAllBindingVarsInSelectBlock(Collection<VariableExpr> selectFreeVars,
-            Collection<VariableExpr> fromBindingVars, Collection<VariableExpr> letsBindingVars,
-            Collection<VariableExpr> gbyLetsBindingVars) {
-        selectFreeVars.removeAll(fromBindingVars);
-        selectFreeVars.removeAll(letsBindingVars);
-        selectFreeVars.removeAll(gbyLetsBindingVars);
-        selectFreeVars.removeAll(gbyLetsBindingVars);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
deleted file mode 100644
index 14e80d9..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
+++ /dev/null
@@ -1,284 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.visitor.base;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.clause.LimitClause;
-import org.apache.asterix.lang.common.context.Scope;
-import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.QuantifiedExpression;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.parser.ScopeChecker;
-import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
-import org.apache.asterix.lang.common.statement.FunctionDecl;
-import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.lang.common.struct.QuantifiedPair;
-import org.apache.asterix.lang.common.struct.VarIdentifier;
-import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.JoinClause;
-import org.apache.asterix.lang.sqlpp.clause.NestClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
-import org.apache.hyracks.algebricks.core.algebra.base.Counter;
-
-public class AbstractSqlppExpressionScopingVisitor extends AbstractSqlppSimpleExpressionVisitor {
-
-    protected final ScopeChecker scopeChecker = new ScopeChecker();
-    protected final LangRewritingContext context;
-
-    /**
-     * @param context,
-     *            manages ids of variables and guarantees uniqueness of variables.
-     */
-    public AbstractSqlppExpressionScopingVisitor(LangRewritingContext context) {
-        this.context = context;
-        this.scopeChecker.setVarCounter(new Counter(context.getVarCounter()));
-    }
-
-    @Override
-    public Expression visit(FromClause fromClause, Expression arg) throws AsterixException {
-        scopeChecker.extendCurrentScope();
-        for (FromTerm fromTerm : fromClause.getFromTerms()) {
-            fromTerm.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(FromTerm fromTerm, Expression arg) throws AsterixException {
-        scopeChecker.createNewScope();
-        // Visit the left expression of a from term.
-        fromTerm.setLeftExpression(fromTerm.getLeftExpression().accept(this, arg));
-
-        // Registers the data item variable.
-        VariableExpr leftVar = fromTerm.getLeftVariable();
-        scopeChecker.getCurrentScope().addNewVarSymbolToScope(leftVar.getVar());
-
-        // Registers the positional variable
-        if (fromTerm.hasPositionalVariable()) {
-            VariableExpr posVar = fromTerm.getPositionalVariable();
-            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
-        }
-        // Visits join/unnest/nest clauses.
-        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
-            correlateClause.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(JoinClause joinClause, Expression arg) throws AsterixException {
-        Scope backupScope = scopeChecker.removeCurrentScope();
-        Scope parentScope = scopeChecker.getCurrentScope();
-        scopeChecker.createNewScope();
-        // NOTE: the two join branches cannot be correlated, instead of checking
-        // the correlation here,
-        // we defer the check to the query optimizer.
-        joinClause.setRightExpression(joinClause.getRightExpression().accept(this, arg));
-
-        // Registers the data item variable.
-        VariableExpr rightVar = joinClause.getRightVariable();
-        scopeChecker.getCurrentScope().addNewVarSymbolToScope(rightVar.getVar());
-
-        if (joinClause.hasPositionalVariable()) {
-            // Registers the positional variable.
-            VariableExpr posVar = joinClause.getPositionalVariable();
-            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
-        }
-
-        Scope rightScope = scopeChecker.removeCurrentScope();
-        Scope mergedScope = new Scope(scopeChecker, parentScope);
-        mergedScope.merge(backupScope);
-        mergedScope.merge(rightScope);
-        scopeChecker.pushExistingScope(mergedScope);
-        // The condition expression can refer to the just registered variables
-        // for the right branch.
-        joinClause.setConditionExpression(joinClause.getConditionExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(NestClause nestClause, Expression arg) throws AsterixException {
-        // NOTE: the two branches of a NEST cannot be correlated, instead of
-        // checking the correlation here, we defer the check to the query
-        // optimizer.
-        nestClause.setRightExpression(nestClause.getRightExpression().accept(this, arg));
-
-        // Registers the data item variable.
-        VariableExpr rightVar = nestClause.getRightVariable();
-        scopeChecker.getCurrentScope().addNewVarSymbolToScope(rightVar.getVar());
-
-        if (nestClause.hasPositionalVariable()) {
-            // Registers the positional variable.
-            VariableExpr posVar = nestClause.getPositionalVariable();
-            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
-        }
-
-        // The condition expression can refer to the just registered variables
-        // for the right branch.
-        nestClause.setConditionExpression(nestClause.getConditionExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(UnnestClause unnestClause, Expression arg) throws AsterixException {
-        unnestClause.setRightExpression(unnestClause.getRightExpression().accept(this, arg));
-
-        // register the data item variable
-        VariableExpr rightVar = unnestClause.getRightVariable();
-        scopeChecker.getCurrentScope().addNewVarSymbolToScope(rightVar.getVar());
-
-        if (unnestClause.hasPositionalVariable()) {
-            // register the positional variable
-            VariableExpr posVar = unnestClause.getPositionalVariable();
-            scopeChecker.getCurrentScope().addNewVarSymbolToScope(posVar.getVar());
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectSetOperation selectSetOperation, Expression arg) throws AsterixException {
-        selectSetOperation.getLeftInput().accept(this, arg);
-        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
-            scopeChecker.createNewScope();
-            right.getSetOperationRightInput().accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(Query q, Expression arg) throws AsterixException {
-        q.setBody(q.getBody().accept(this, arg));
-        q.setVarCounter(scopeChecker.getVarCounter());
-        context.setVarCounter(scopeChecker.getVarCounter());
-        return null;
-    }
-
-    @Override
-    public Expression visit(FunctionDecl fd, Expression arg) throws AsterixException {
-        scopeChecker.createNewScope();
-        fd.setFuncBody(fd.getFuncBody().accept(this, arg));
-        scopeChecker.removeCurrentScope();
-        return null;
-    }
-
-    @Override
-    public Expression visit(GroupbyClause gc, Expression arg) throws AsterixException {
-        Scope newScope = scopeChecker.extendCurrentScopeNoPush(true);
-        // Puts all group-by variables into the symbol set of the new scope.
-        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
-            gbyVarExpr.setExpr(gbyVarExpr.getExpr().accept(this, arg));
-            VariableExpr gbyVar = gbyVarExpr.getVar();
-            if (gbyVar != null) {
-                newScope.addNewVarSymbolToScope(gbyVarExpr.getVar().getVar());
-            }
-        }
-        for (VariableExpr withVar : gc.getWithVarList()) {
-            newScope.addNewVarSymbolToScope(withVar.getVar());
-        }
-        scopeChecker.replaceCurrentScope(newScope);
-        return null;
-    }
-
-    @Override
-    public Expression visit(LimitClause limitClause, Expression arg) throws AsterixException {
-        scopeChecker.pushForbiddenScope(scopeChecker.getCurrentScope());
-        limitClause.setLimitExpr(limitClause.getLimitExpr().accept(this, arg));
-        scopeChecker.popForbiddenScope();
-        return null;
-    }
-
-    @Override
-    public Expression visit(LetClause letClause, Expression arg) throws AsterixException {
-        scopeChecker.extendCurrentScope();
-        letClause.setBindingExpr(letClause.getBindingExpr().accept(this, arg));
-        scopeChecker.getCurrentScope().addNewVarSymbolToScope(letClause.getVarExpr().getVar());
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectExpression selectExpression, Expression arg) throws AsterixException {
-        Scope scopeBeforeSelectExpression = scopeChecker.getCurrentScope();
-        scopeChecker.createNewScope();
-
-        // visit let list
-        if (selectExpression.hasLetClauses()) {
-            for (LetClause letClause : selectExpression.getLetList()) {
-                letClause.accept(this, arg);
-            }
-        }
-
-        // visit the main select.
-        selectExpression.getSelectSetOperation().accept(this, selectExpression);
-
-        // visit order by
-        if (selectExpression.hasOrderby()) {
-            selectExpression.getOrderbyClause().accept(this, arg);
-        }
-
-        // visit limit
-        if (selectExpression.hasLimit()) {
-            selectExpression.getLimitClause().accept(this, arg);
-        }
-
-        // Exit scopes that were entered within this select expression
-        while (scopeChecker.getCurrentScope() != scopeBeforeSelectExpression) {
-            scopeChecker.removeCurrentScope();
-        }
-        return selectExpression;
-    }
-
-    @Override
-    public Expression visit(QuantifiedExpression qe, Expression arg) throws AsterixException {
-        scopeChecker.createNewScope();
-        for (QuantifiedPair pair : qe.getQuantifiedList()) {
-            scopeChecker.getCurrentScope().addNewVarSymbolToScope(pair.getVarExpr().getVar());
-            pair.setExpr(pair.getExpr().accept(this, arg));
-        }
-        qe.setSatisfiesExpr(qe.getSatisfiesExpr().accept(this, arg));
-        scopeChecker.removeCurrentScope();
-        return qe;
-    }
-
-    @Override
-    public Expression visit(VariableExpr varExpr, Expression arg) throws AsterixException {
-        String varName = varExpr.getVar().getValue();
-        if (scopeChecker.isInForbiddenScopes(varName)) {
-            throw new AsterixException(
-                    "Inside limit clauses, it is disallowed to reference a variable having the same name as any variable bound in the same scope as the limit clause.");
-        }
-        Identifier ident = scopeChecker.lookupSymbol(varName);
-        if (ident != null) {
-            // Exists such an identifier, then this is a variable reference instead of a variable
-            // definition.
-            varExpr.setIsNewVar(false);
-            varExpr.setVar((VarIdentifier) ident);
-        }
-        return varExpr;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java
deleted file mode 100644
index 18c2789..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppSimpleExpressionVisitor.java
+++ /dev/null
@@ -1,347 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.visitor.base;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.clause.LimitClause;
-import org.apache.asterix.lang.common.clause.OrderbyClause;
-import org.apache.asterix.lang.common.clause.WhereClause;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.FieldAccessor;
-import org.apache.asterix.lang.common.expression.FieldBinding;
-import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.IfExpr;
-import org.apache.asterix.lang.common.expression.IndexAccessor;
-import org.apache.asterix.lang.common.expression.ListConstructor;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
-import org.apache.asterix.lang.common.expression.OperatorExpr;
-import org.apache.asterix.lang.common.expression.QuantifiedExpression;
-import org.apache.asterix.lang.common.expression.RecordConstructor;
-import org.apache.asterix.lang.common.expression.UnaryExpr;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.statement.FunctionDecl;
-import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.common.struct.QuantifiedPair;
-import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.HavingClause;
-import org.apache.asterix.lang.sqlpp.clause.JoinClause;
-import org.apache.asterix.lang.sqlpp.clause.NestClause;
-import org.apache.asterix.lang.sqlpp.clause.Projection;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectElement;
-import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
-
-public class AbstractSqlppSimpleExpressionVisitor extends AbstractSqlppQueryExpressionVisitor<Expression, Expression> {
-
-    @Override
-    public Expression visit(FromClause fromClause, Expression arg) throws AsterixException {
-        for (FromTerm fromTerm : fromClause.getFromTerms()) {
-            fromTerm.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(FromTerm fromTerm, Expression arg) throws AsterixException {
-        // Visit the left expression of a from term.
-        fromTerm.setLeftExpression(fromTerm.getLeftExpression().accept(this, arg));
-
-        // Visits join/unnest/nest clauses.
-        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
-            correlateClause.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(JoinClause joinClause, Expression arg) throws AsterixException {
-        joinClause.setRightExpression(joinClause.getRightExpression().accept(this, arg));
-        joinClause.setConditionExpression(joinClause.getConditionExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(NestClause nestClause, Expression arg) throws AsterixException {
-        nestClause.setRightExpression(nestClause.getRightExpression().accept(this, arg));
-        nestClause.setConditionExpression(nestClause.getConditionExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(UnnestClause unnestClause, Expression arg) throws AsterixException {
-        unnestClause.setRightExpression(unnestClause.getRightExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(Projection projection, Expression arg) throws AsterixException {
-        projection.setExpression(projection.getExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectBlock selectBlock, Expression arg) throws AsterixException {
-        // Traverses the select block in the order of "from", "let"s, "where",
-        // "group by", "let"s, "having" and "select".
-        if (selectBlock.hasFromClause()) {
-            selectBlock.getFromClause().accept(this, arg);
-        }
-        if (selectBlock.hasLetClauses()) {
-            List<LetClause> letList = selectBlock.getLetList();
-            for (LetClause letClause : letList) {
-                letClause.accept(this, arg);
-            }
-        }
-        if (selectBlock.hasWhereClause()) {
-            selectBlock.getWhereClause().accept(this, arg);
-        }
-        if (selectBlock.hasGroupbyClause()) {
-            selectBlock.getGroupbyClause().accept(this, arg);
-        }
-        if (selectBlock.hasLetClausesAfterGroupby()) {
-            List<LetClause> letListAfterGby = selectBlock.getLetListAfterGroupby();
-            for (LetClause letClauseAfterGby : letListAfterGby) {
-                letClauseAfterGby.accept(this, arg);
-            }
-        }
-        if (selectBlock.hasHavingClause()) {
-            selectBlock.getHavingClause().accept(this, arg);
-        }
-        selectBlock.getSelectClause().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectClause selectClause, Expression arg) throws AsterixException {
-        if (selectClause.selectElement()) {
-            selectClause.getSelectElement().accept(this, arg);
-        }
-        if (selectClause.selectRegular()) {
-            selectClause.getSelectRegular().accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectElement selectElement, Expression arg) throws AsterixException {
-        selectElement.setExpression(selectElement.getExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectRegular selectRegular, Expression arg) throws AsterixException {
-        for (Projection projection : selectRegular.getProjections()) {
-            projection.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectSetOperation selectSetOperation, Expression arg) throws AsterixException {
-        selectSetOperation.getLeftInput().accept(this, arg);
-        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
-            right.getSetOperationRightInput().accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(HavingClause havingClause, Expression arg) throws AsterixException {
-        havingClause.setFilterExpression(havingClause.getFilterExpression().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(Query q, Expression arg) throws AsterixException {
-        q.setBody(q.getBody().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(FunctionDecl fd, Expression arg) throws AsterixException {
-        fd.setFuncBody(fd.getFuncBody().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(WhereClause whereClause, Expression arg) throws AsterixException {
-        whereClause.setWhereExpr(whereClause.getWhereExpr().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(OrderbyClause oc, Expression arg) throws AsterixException {
-        List<Expression> newOrderbyList = new ArrayList<Expression>();
-        for (Expression orderExpr : oc.getOrderbyList()) {
-            newOrderbyList.add(orderExpr.accept(this, arg));
-        }
-        oc.setOrderbyList(newOrderbyList);
-        return null;
-    }
-
-    @Override
-    public Expression visit(GroupbyClause gc, Expression arg) throws AsterixException {
-        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
-            gbyVarExpr.setExpr(gbyVarExpr.getExpr().accept(this, arg));
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(LimitClause limitClause, Expression arg) throws AsterixException {
-        limitClause.setLimitExpr(limitClause.getLimitExpr().accept(this, arg));
-        if (limitClause.hasOffset()) {
-            limitClause.setOffset(limitClause.getOffset().accept(this, arg));
-        }
-        return null;
-    }
-
-    @Override
-    public Expression visit(LetClause letClause, Expression arg) throws AsterixException {
-        letClause.setBindingExpr(letClause.getBindingExpr().accept(this, arg));
-        return null;
-    }
-
-    @Override
-    public Expression visit(SelectExpression selectExpression, Expression arg) throws AsterixException {
-        // visit let list
-        if (selectExpression.hasLetClauses()) {
-            for (LetClause letClause : selectExpression.getLetList()) {
-                letClause.accept(this, arg);
-            }
-        }
-
-        // visit the main select.
-        selectExpression.getSelectSetOperation().accept(this, arg);
-
-        // visit order by
-        if (selectExpression.hasOrderby()) {
-            for (Expression orderExpr : selectExpression.getOrderbyClause().getOrderbyList()) {
-                orderExpr.accept(this, arg);
-            }
-        }
-
-        // visit limit
-        if (selectExpression.hasLimit()) {
-            selectExpression.getLimitClause().accept(this, arg);
-        }
-        return selectExpression;
-    }
-
-    @Override
-    public Expression visit(LiteralExpr l, Expression arg) throws AsterixException {
-        return l;
-    }
-
-    @Override
-    public Expression visit(ListConstructor lc, Expression arg) throws AsterixException {
-        List<Expression> newExprList = new ArrayList<Expression>();
-        for (Expression expr : lc.getExprList()) {
-            newExprList.add(expr.accept(this, arg));
-        }
-        lc.setExprList(newExprList);
-        return lc;
-    }
-
-    @Override
-    public Expression visit(RecordConstructor rc, Expression arg) throws AsterixException {
-        for (FieldBinding binding : rc.getFbList()) {
-            binding.setLeftExpr(binding.getLeftExpr().accept(this, arg));
-            binding.setRightExpr(binding.getRightExpr().accept(this, arg));
-        }
-        return rc;
-    }
-
-    @Override
-    public Expression visit(OperatorExpr operatorExpr, Expression arg) throws AsterixException {
-        List<Expression> newExprList = new ArrayList<Expression>();
-        for (Expression expr : operatorExpr.getExprList()) {
-            newExprList.add(expr.accept(this, arg));
-        }
-        operatorExpr.setExprList(newExprList);
-        return operatorExpr;
-    }
-
-    @Override
-    public Expression visit(IfExpr ifExpr, Expression arg) throws AsterixException {
-        ifExpr.setCondExpr(ifExpr.getCondExpr().accept(this, arg));
-        ifExpr.setThenExpr(ifExpr.getThenExpr().accept(this, arg));
-        ifExpr.setElseExpr(ifExpr.getElseExpr().accept(this, arg));
-        return ifExpr;
-    }
-
-    @Override
-    public Expression visit(QuantifiedExpression qe, Expression arg) throws AsterixException {
-        for (QuantifiedPair pair : qe.getQuantifiedList()) {
-            pair.setExpr(pair.getExpr().accept(this, arg));
-        }
-        qe.setSatisfiesExpr(qe.getSatisfiesExpr().accept(this, arg));
-        return qe;
-    }
-
-    @Override
-    public Expression visit(CallExpr callExpr, Expression arg) throws AsterixException {
-        List<Expression> newExprList = new ArrayList<Expression>();
-        for (Expression expr : callExpr.getExprList()) {
-            newExprList.add(expr.accept(this, arg));
-        }
-        callExpr.setExprList(newExprList);
-        return callExpr;
-    }
-
-    @Override
-    public Expression visit(VariableExpr varExpr, Expression arg) throws AsterixException {
-        return varExpr;
-    }
-
-    @Override
-    public Expression visit(UnaryExpr u, Expression arg) throws AsterixException {
-        u.setExpr(u.getExpr().accept(this, arg));
-        return u;
-    }
-
-    @Override
-    public Expression visit(FieldAccessor fa, Expression arg) throws AsterixException {
-        fa.setExpr(fa.getExpr().accept(this, arg));
-        return fa;
-    }
-
-    @Override
-    public Expression visit(IndexAccessor ia, Expression arg) throws AsterixException {
-        ia.setExpr(ia.getExpr().accept(this, arg));
-        if (ia.getIndexExpr() != null) {
-            ia.setIndexExpr(ia.getIndexExpr());
-        }
-        return ia;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql
new file mode 100644
index 0000000..21c8ac6
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.1.ddl.aql
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create an adapter that uses external parser to parse data from files
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+
+drop dataverse externallibtest if exists;
+create dataverse externallibtest;
+use dataverse externallibtest;
+
+create type Classad as open {
+GlobalJobId: string
+};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql
new file mode 100644
index 0000000..0290611
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.2.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql
new file mode 100644
index 0000000..9a7f043
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.3.ddl.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use dataverse externallibtest;
+
+create external dataset Condor(Classad) using localfs(
+("path"="asterix_nc1://data/external-parser/jobads.new"),
+("format"="semi-structured"),
+("record-start"="["),
+("record-end"="]"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql
new file mode 100644
index 0000000..9d5d499
--- /dev/null
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser-new/classad-parser-new.4.query.aql
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse externallibtest;
+
+for $x in dataset Condor
+order by $x.GlobalJobId
+return $x;



[27/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/load/issue289_query/issue289_query.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/load/issue289_query/issue289_query.3.query.sqlpp
index 50b4066,0000000..e3b0fc7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/load/issue289_query/issue289_query.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/load/issue289_query/issue289_query.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Load dataset with float numbers containing "E-4f"
 + * Expected Res : Success
 + * Date         : 01 Apr 2013
 + */
 +
 +use test;
 +
 +
- select element test.count((
++select element test.coll_count((
 +    select element l
 +    from  Customers as l
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/materialization/assign-reuse/assign-reuse.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/materialization/assign-reuse/assign-reuse.3.query.sqlpp
index 390f6c9,0000000..ab4ad3d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/materialization/assign-reuse/assign-reuse.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/materialization/assign-reuse/assign-reuse.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use TinySocial;
 +
 +
 +with  lonelyusers as (
 +      select element d
 +      from  FacebookUsers as d
-       where (TinySocial.count(d."friend-ids") < 2)
++      where (TinySocial.coll_count(d."friend-ids") < 2)
 +  ),
 +      lonelyusers2 as (
 +      select element d
 +      from  FacebookUsers as d
-       where (TinySocial.count(d."friend-ids") < 2)
++      where (TinySocial.coll_count(d."friend-ids") < 2)
 +  )
 +select element {'user1':{'id':l1.id,'name':l1.name},'user2':{'id':l2.id,'name':l2.name}}
 +from  lonelyusers as l1,
 +      lonelyusers2 as l2
 +where (l1.id < l2.id)
 +order by l1.id,l2.id
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/groupby-orderby-count/groupby-orderby-count.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/groupby-orderby-count/groupby-orderby-count.3.query.sqlpp
index 3ff8376,0000000..924fb2c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/groupby-orderby-count/groupby-orderby-count.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/misc/groupby-orderby-count/groupby-orderby-count.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use twitter;
 +
 +
- select element {'word':tok,'count':twitter.count(token)}
++select element {'word':tok,'count':twitter.coll_count(token)}
 +from  TwitterData as t,
 +      twitter."word-tokens"(t.text) as token
 +group by token as tok
- order by twitter.count(token) desc,tok
++order by twitter.coll_count(token) desc,tok
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/open-closed/query-issue258/query-issue258.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/open-closed/query-issue258/query-issue258.2.update.sqlpp
index 16c53eb,0000000..b662786
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/open-closed/query-issue258/query-issue258.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/open-closed/query-issue258/query-issue258.2.update.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue258
 +                 : https://code.google.com/p/asterixdb/issues/detail?id=258
 + * Expected Res : Success
 + * Date         : 21 May 2013
 + */
 +
 +use test;
 +
 +
 +insert into ds1
- if ((test.count((
++if ((coll_count((
 +      select element x
 +      from  ds2 as x
 +      where (x.id = 10)
 +  )) <= 0))
 +then {'id':10}
 +else {'id':5};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_03/everysat_03.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_03/everysat_03.3.query.sqlpp
index 04e7bfa,0000000..f83a9e9
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_03/everysat_03.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/everysat_03/everysat_03.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description      : Test Quantified Expressions
 + *                  : every <variable-name> in [ordered-list], <variable-name> in [ordered-list] satisfies expression
 + * Expected Result  : Success
 + * Date             : 5th July 2012
 + */
 +
- with  a as [every x in [1,2] satisfies (avg([x,1]) = 1),every x in ['1','2'] satisfies (string(x) = '1'),every x in ['1','2'] satisfies ("string-length"(x) = 1),every x in [[1,2],[10],[1,5,7,8]] satisfies (count(x) = 1),every x in [[2],[10],[8]] satisfies (count(x) = 1),every x in [true,false] satisfies boolean('true'),every x in [true,true] satisfies not(x),every x in [1,2,3],
++with  a as [every x in [1,2] satisfies (coll_avg([x,1]) = 1),every x in ['1','2'] satisfies (string(x) = '1'),every x in ['1','2'] satisfies ("string-length"(x) = 1),every x in [[1,2],[10],[1,5,7,8]] satisfies (coll_count(x) = 1),every x in [[2],[10],[8]] satisfies (coll_count(x) = 1),every x in [true,false] satisfies boolean('true'),every x in [true,true] satisfies not(x),every x in [1,2,3],
 +y in [4,5,6] satisfies ((x + y) = 5),every x in [1,2,3],
 +y in [4,5,6] satisfies ((x - y) = 5),every x in [1,2,3],
 +y in [4,5,6] satisfies ((x * y) = 10),every x in ['ab','cd'],
 +y in ['ab','de'] satisfies (string(x) = string(y)),every x in [1,2,3],
 +y in [4,5,6] satisfies (int32(x) = int32(y)),every x in [1,2,3],
 +y in [4,5,6] satisfies (float(x) = float(y)),every x in [1,2,3],
 +y in [4,5,6] satisfies (double(x) = double(y)),every x in ['true','false'],
 +y in ['false','true'] satisfies (boolean(x) = boolean(y)),every x in ['1980-05-05T13:13:13Z','1980-05-05T13:13:13Z'],
 +y in ['1980-05-05T13:13:13Z','1980-05-05T13:13:13Z'] satisfies (datetime(x) = datetime(y))]
 +select element i
 +from  a as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_03/somesat_03.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_03/somesat_03.3.query.sqlpp
index 61317e8,0000000..5274bb5
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_03/somesat_03.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_03/somesat_03.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description     : Test quantified expressions; some variable in [ordered list] satisfies expression.
 + * Expected Result : Success
 + * Date            : 6th July 2012
 + */
 +
- with  a as [some x in [1,2] satisfies ((x + x) = 3),some x in [1,2] satisfies ((x + x) = 2),some x in [1,2] satisfies ((x - 2) = 2),some x in [1,2] satisfies ((x - 2) = 0),some x in [1,2] satisfies ((x * 2) = 4),some x in [1,2] satisfies ((x / 2) = 1),some x in [1,2] satisfies (avg([x,1]) = 1),some x in [1,2] satisfies boolean('true'),some x in [1,2] satisfies boolean('false'),some x in [true,false] satisfies not(x),some x in [1,2] satisfies ((x = 1) or (x = 2)),some x in [1,2] satisfies ((x = 1) and ((x + 1) = 2))]
++with  a as [some x in [1,2] satisfies ((x + x) = 3),some x in [1,2] satisfies ((x + x) = 2),some x in [1,2] satisfies ((x - 2) = 2),some x in [1,2] satisfies ((x - 2) = 0),some x in [1,2] satisfies ((x * 2) = 4),some x in [1,2] satisfies ((x / 2) = 1),some x in [1,2] satisfies (coll_avg([x,1]) = 1),some x in [1,2] satisfies boolean('true'),some x in [1,2] satisfies boolean('false'),some x in [true,false] satisfies not(x),some x in [1,2] satisfies ((x = 1) or (x = 2)),some x in [1,2] satisfies ((x = 1) and ((x + 1) = 2))]
 +select element i
 +from  a as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_04/somesat_04.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_04/somesat_04.3.query.sqlpp
index c29caa7,0000000..3b284aa
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_04/somesat_04.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/quantifiers/somesat_04/somesat_04.3.query.sqlpp
@@@ -1,34 -1,0 +1,34 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description      : Test Quantified Expressions
 + *                  : some <variable-name> in [ordered-list] satisfies function expression
 + *                  : some <variable-name> in [ordered-list],<variable-name> in [ordered-list] satisfies expression
 + * Expected Result  : Success
 + * Date             : 5th July 2012
 + */
 +
- with  a as [some x in ['foo','foobar','foot','fox'] satisfies ("string-length"(x) = 3),some x in [[5,4,3,2],[1,2,3,4,5,6,7,8],[4,2,3,4]] satisfies (count(x) = 8),some x in [1,2] satisfies ((x = 1) or (x = 2)),some x in [1,2] satisfies ((x = 1) and ((x + 1) = 2)),some x in ['A','B','C'] satisfies (x = 'A'),some x in [1,2,3],
++with  a as [some x in ['foo','foobar','foot','fox'] satisfies ("string-length"(x) = 3),some x in [[5,4,3,2],[1,2,3,4,5,6,7,8],[4,2,3,4]] satisfies (coll_count(x) = 8),some x in [1,2] satisfies ((x = 1) or (x = 2)),some x in [1,2] satisfies ((x = 1) and ((x + 1) = 2)),some x in ['A','B','C'] satisfies (x = 'A'),some x in [1,2,3],
 +y in [4,5,6] satisfies ((x + y) = 5),some x in [1,2,3],
 +y in [4,5,6] satisfies ((x - y) = 5),some x in [1,2,3],
 +y in [4,5,6] satisfies ((x * y) = 10),some x in [1,2,3],
 +y in [4,5,6] satisfies ((x / y) = 2)]
 +select element i
 +from  a as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/semistructured/count-nullable/count-nullable.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/semistructured/count-nullable/count-nullable.3.query.sqlpp
index b2d8f6f,0000000..7102a14
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/semistructured/count-nullable/count-nullable.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/semistructured/count-nullable/count-nullable.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element {'custage':age,'count':test.count(c)}
++select element {'custage':age,'count':test.coll_count(c)}
 +from  Customers as c
 +group by c.age as age
 +order by age
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.3.query.sqlpp
index 5047af8,0000000..c25b87f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation-with-filtering/cell-aggregation-with-filtering.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
 +select element {'cell':c,'count':num}
 +from  TwitterData as t
 +with  keywords as 'Allergies',
 +      region as test.polygon('\n\t33.80503407287759,-126.41235263538363 \n\t44.9090773200516,-126.41235263538363 \n\t44.9090773200516,-87.65258701038363 \n\t33.80503407287759,-87.65258701038363')
 +where (test."spatial-intersect"(t.loc,region) and (t.time > test.datetime('2011-05-15T00:00:00Z')) and (t.time < test.datetime('2011-05-16T23:59:59Z')) and test.contains(t.text,keywords))
 +group by test."spatial-cell"(t.loc,test."create-point"(24.5,-125.5),3.0,3.0) as c
- with  num as test.count(t)
++with  num as test.coll_count(t)
 +order by num
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation/cell-aggregation.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation/cell-aggregation.3.query.sqlpp
index 676457c,0000000..0d85a10
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation/cell-aggregation.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/spatial/cell-aggregation/cell-aggregation.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
 +with  grid as (
 +      select element {'cell':c,'count':num}
 +      from  MyData as o
 +      group by test."spatial-cell"(o.loc,test."create-point"(0.0,0.0),5.0,5.0) as c
-       with  num as test.count(o)
++      with  num as test.coll_count(o)
 +      order by num
 +  )
 +select element g
 +from  grid as g
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temp-dataset/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temp-dataset/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
index c9eb48f,0000000..782130a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temp-dataset/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temp-dataset/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description     : Test a read query over a temporary dataset.
 + * Expected Result : Success
 + * Date            : March 27 2015
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.sum((
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.coll_sum((
 +        select element i.l_quantity
 +        from  l as i
-     )),'sum_base_price':tpch.sum((
++    )),'sum_base_price':tpch.coll_sum((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'sum_disc_price':tpch.sum((
++    )),'sum_disc_price':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount))
 +        from  l as i
-     )),'sum_charge':tpch.sum((
++    )),'sum_charge':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +        from  l as i
-     )),'ave_qty':tpch.avg((
++    )),'ave_qty':tpch.coll_avg((
 +        select element i.l_quantity
 +        from  l as i
-     )),'ave_price':tpch.avg((
++    )),'ave_price':tpch.coll_avg((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'ave_disc':tpch.avg((
++    )),'ave_disc':tpch.coll_avg((
 +        select element i.l_discount
 +        from  l as i
-     )),'count_order':tpch.count(l)}
++    )),'count_order':tpch.coll_count(l)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_max/agg_max.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_max/agg_max.3.query.sqlpp
index 6c3ed52,0000000..b67806d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_max/agg_max.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_max/agg_max.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'m0':test.max((
++{'m0':test.coll_max((
 +    select element i.time
 +    from  tsdata as i
- )),'m1':test.max((
++)),'m1':test.coll_max((
 +    select element i.date
 +    from  tsdata as i
- )),'m2':test.max((
++)),'m2':test.coll_max((
 +    select element i.datetime
 +    from  tsdata as i
- )),'m3':test.max((
++)),'m3':test.coll_max((
 +    select element i.dtduration
 +    from  tsdata as i
- )),'m4':test.max((
++)),'m4':test.coll_max((
 +    select element i.ymduration
 +    from  tsdata as i
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_min/agg_min.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_min/agg_min.3.query.sqlpp
index 08d47ce,0000000..cfc6dd7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_min/agg_min.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/agg_min/agg_min.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'m0':test.min((
++{'m0':test.coll_min((
 +    select element i.time
 +    from  tsdata as i
- )),'m1':test.min((
++)),'m1':test.coll_min((
 +    select element i.date
 +    from  tsdata as i
- )),'m2':test.min((
++)),'m2':test.coll_min((
 +    select element i.datetime
 +    from  tsdata as i
- )),'m3':test.min((
++)),'m3':test.coll_min((
 +    select element i.dtduration
 +    from  tsdata as i
- )),'m4':test.min((
++)),'m4':test.coll_min((
 +    select element i.ymduration
 +    from  tsdata as i
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_1/overlap_bins_gby_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_1/overlap_bins_gby_1.3.query.sqlpp
index 86fc34a,0000000..d5b3fc2
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_1/overlap_bins_gby_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_1/overlap_bins_gby_1.3.query.sqlpp
@@@ -1,39 -1,0 +1,39 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
 + * Interval_bin_gby test case: test the group-by using interval-bin function
 + * Expected result: success
 + **/
 +
 +use test;
 +
 +
- select element {'timebin':bin,'count':test.count(i2),'total_ms':test.sum((
++select element {'timebin':bin,'count':test.count(i2),'total_ms':test.coll_sum((
 +        select element test."ms-from-day-time-duration"(test."duration-from-interval"(test."get-overlapping-interval"(bin,i3.interval)))
 +        from  i2 as i3
 +    ))}
 +from  (
 +    select element {'interval':test."interval-start-from-time"(i1.time,i1.duration)}
 +    from  tsdata as i1
 +    order by i1.time
 +) as i2,
 +      test."overlap-bins"(i2.interval,test.time('00:00:00'),test."day-time-duration"('PT1H30M')) as j
 +group by j as bin
 +order by test."get-interval-start"(bin)
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_3/overlap_bins_gby_3.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_3/overlap_bins_gby_3.3.query.sqlpp
index c996e7d,0000000..933626e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_3/overlap_bins_gby_3.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/temporal/overlap_bins_gby_3/overlap_bins_gby_3.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use multitask;
 +
 +
 +select element {'timebin':bin,'subgroups':(
 +        select element {'subgid':subgid,'item_count':multitask.count(i)}
 +        from  logs as i
 +        where multitask."interval-covers"(bin,multitask."interval-start-from-time"(i.time,multitask."duration-from-ms"(i.duration)))
 +        group by i.app as subgid
 +        order by subgid,multitask.count(i)
 +    )}
- from  multitask."overlap-bins"(multitask.interval(multitask.min((
++from  multitask."overlap-bins"(multitask.interval(multitask.coll_min((
 +    select element i.time
 +    from  logs as i
- )),multitask.max((
++)),multitask.coll_max((
 +    select element (i.time + multitask."duration-from-ms"((i.duration * 1000)))
 +    from  logs as i
 +))),multitask.time('00:00:00.000'),multitask."day-time-duration"('PT1M')) as bin
 +order by multitask."get-interval-start"(bin)
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/query-ASTERIXDB-1331.25.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/query-ASTERIXDB-1331.25.query.sqlpp
index 8ccf4ef,0000000..ad65788
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/query-ASTERIXDB-1331.25.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/query-ASTERIXDB-1331.25.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +/** This test case is to verify the fix for ASTERIXDB-1331. */
 +
 +USE TinySocial;
 +
- SELECT ELEMENT avg((
- select element "string-length"(message.message)
++SELECT ELEMENT coll_avg((
++select element LENGTH(message.message)
 +FROM FacebookMessages AS message
 +WHERE message."in-response-to" >= 1 and
 +      message."in-response-to" < 11
 +));
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.14.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.14.query.sqlpp
index 5770a46,0000000..7ca10bd
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.14.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.14.query.sqlpp
@@@ -1,29 -1,0 +1,29 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
 +* Query 8 - Simple Aggregation
 +*/
 +
 +use TinySocial;
 +
 +
- select element TinySocial.count((
++select element coll_count((
 +    select element fbu
 +    from  FacebookUsers as fbu
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.22.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.22.query.sqlpp
index 559658c,0000000..ac378d0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.22.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite-open/tinysocial-suite.22.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
 +* Deleting Existing Data - Verification
 +*/
 +
 +use TinySocial;
 +
 +
- select element TinySocial.count((
++select element coll_count((
 +    select element t
 +    from  TweetMessages as t
 +    where (t.tweetid = '13')
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.14.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.14.query.sqlpp
index 5770a46,0000000..7ca10bd
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.14.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.14.query.sqlpp
@@@ -1,29 -1,0 +1,29 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
 +* Query 8 - Simple Aggregation
 +*/
 +
 +use TinySocial;
 +
 +
- select element TinySocial.count((
++select element coll_count((
 +    select element fbu
 +    from  FacebookUsers as fbu
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.22.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.22.query.sqlpp
index 559658c,0000000..ac378d0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.22.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.22.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
 +* Deleting Existing Data - Verification
 +*/
 +
 +use TinySocial;
 +
 +
- select element TinySocial.count((
++select element coll_count((
 +    select element t
 +    from  TweetMessages as t
 +    where (t.tweetid = '13')
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.25.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.25.query.sqlpp
index 7c22f59,0000000..a4b852d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.25.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tinysocial/tinysocial-suite/tinysocial-suite.25.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE TinySocial;
 +
- SELECT ELEMENT avg((
- select element "string-length"(message.message)
++SELECT ELEMENT coll_avg((
++select element LENGTH(message.message)
 +FROM FacebookMessages AS message
 +WHERE message."in-response-to" >= 1 and
 +      message."in-response-to" < 11
 +));
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
index 94b313f,0000000..a971652
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.sum((
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.coll_sum((
 +        select element i.l_quantity
 +        from  l as i
-     )),'sum_base_price':tpch.sum((
++    )),'sum_base_price':tpch.coll_sum((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'sum_disc_price':tpch.sum((
++    )),'sum_disc_price':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount))
 +        from  l as i
-     )),'sum_charge':tpch.sum((
++    )),'sum_charge':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +        from  l as i
-     )),'ave_qty':tpch.avg((
++    )),'ave_qty':tpch.coll_avg((
 +        select element i.l_quantity
 +        from  l as i
-     )),'ave_price':tpch.avg((
++    )),'ave_price':tpch.coll_avg((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'ave_disc':tpch.avg((
++    )),'ave_disc':tpch.coll_avg((
 +        select element i.l_discount
 +        from  l as i
 +    )),'count_order':tpch.count(l)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
index a55c74d,0000000..499899d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
@@@ -1,80 -1,0 +1,80 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp1() {
 +(
 +    select element {'s_acctbal':pssrn.s_acctbal,'s_name':pssrn.s_name,'n_name':pssrn.n_name,'p_partkey':p.p_partkey,'ps_supplycost':pssrn.ps_supplycost,'p_mfgr':p.p_mfgr,'s_address':pssrn.s_address,'s_phone':pssrn.s_phone,'s_comment':pssrn.s_comment}
 +    from  Part as p,
 +          (
 +        select element {'n_name':srn.n_name,'p_partkey':ps.ps_partkey,'ps_supplycost':ps.ps_supplycost,'s_name':srn.s_name,'s_acctbal':srn.s_acctbal,'s_address':srn.s_address,'s_phone':srn.s_phone,'s_comment':srn.s_comment}
 +        from  Partsupp as ps,
 +              (
 +            select element {'s_suppkey':s.s_suppkey,'n_name':rn.n_name,'s_name':s.s_name,'s_acctbal':s.s_acctbal,'s_address':s.s_address,'s_phone':s.s_phone,'s_comment':s.s_comment}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_nationkey':n.n_nationkey,'n_name':n.n_name}
 +                from  Region as r,
 +                      Nation as n
 +                where ((n.n_regionkey = r.r_regionkey) and (r.r_name = 'EUROPE'))
 +            ) as rn
 +            where (s.s_nationkey = rn.n_nationkey)
 +        ) as srn
 +        where (srn.s_suppkey = ps.ps_suppkey)
 +    ) as pssrn
 +    where ((p.p_partkey = pssrn.p_partkey) and tpch.like(p.p_type,'%BRASS'))
 +)
 +};
 +declare function tmp2() {
 +(
-     select element {'p_partkey':p_partkey,'ps_min_supplycost':tpch.min((
++    select element {'p_partkey':p_partkey,'ps_min_supplycost':tpch.coll_min((
 +            select element i.ps_supplycost
 +            from  pssrn as i
 +        ))}
 +    from  Part as p,
 +          (
 +        select element {'n_name':srn.n_name,'p_partkey':ps.ps_partkey,'ps_supplycost':ps.ps_supplycost,'s_name':srn.s_name,'s_acctbal':srn.s_acctbal,'s_address':srn.s_address,'s_phone':srn.s_phone,'s_comment':srn.s_comment}
 +        from  Partsupp as ps,
 +              (
 +            select element {'s_suppkey':s.s_suppkey,'n_name':rn.n_name,'s_name':s.s_name,'s_acctbal':s.s_acctbal,'s_address':s.s_address,'s_phone':s.s_phone,'s_comment':s.s_comment}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_nationkey':n.n_nationkey,'n_name':n.n_name}
 +                from  Region as r,
 +                      Nation as n
 +                where ((n.n_regionkey = r.r_regionkey) and (r.r_name = 'EUROPE'))
 +            ) as rn
 +            where (s.s_nationkey = rn.n_nationkey)
 +        ) as srn
 +        where (srn.s_suppkey = ps.ps_suppkey)
 +    ) as pssrn
 +    where ((p.p_partkey = pssrn.p_partkey) and tpch.like(p.p_type,'%BRASS'))
 +    /* +hash */
 +    group by pssrn.p_partkey as p_partkey
 +)
 +};
 +select element {'s_acctbal':t1.s_acctbal,'s_name':t1.s_name,'n_name':t1.n_name,'p_partkey':t1.p_partkey,'p_mfgr':t1.p_mfgr,'s_address':t1.s_address,'s_phone':t1.s_phone,'s_comment':t1.s_comment}
 +from  tpch.tmp2() as t2,
 +      tpch.tmp1() as t1
 +where ((t1.p_partkey = t2.p_partkey) and (t1.ps_supplycost = t2.ps_min_supplycost))
 +order by t1.s_acctbal desc,t1.n_name,t1.s_name,t1.p_partkey
 +limit 100
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
index d30c6d3,0000000..bddbb53
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'l_orderkey':l_orderkey,'revenue':revenue,'o_orderdate':o_orderdate,'o_shippriority':o_shippriority}
 +from  Customer as c,
 +      Orders as o,
 +      LineItem as l
 +where (((c.c_mktsegment = 'BUILDING') and (c.c_custkey = o.o_custkey)) and ((l.l_orderkey = o.o_orderkey) and (o.o_orderdate < '1995-03-15') and (l.l_shipdate > '1995-03-15')))
 +/* +hash */
 +group by l.l_orderkey as l_orderkey,o.o_orderdate as o_orderdate,o.o_shippriority as o_shippriority
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  l as i
 +  ))
 +order by revenue desc,o_orderdate
 +limit 10
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q04_order_priority/q04_order_priority.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q04_order_priority/q04_order_priority.3.query.sqlpp
index faca581,0000000..5293499
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q04_order_priority/q04_order_priority.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q04_order_priority/q04_order_priority.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp() {
 +(
 +    select distinct element {'o_orderkey':l.l_orderkey}
 +    from  LineItem as l
 +    where (l.l_commitdate < l.l_receiptdate)
 +)
 +};
- select element {'order_priority':o_orderpriority,'count':tpch.count(o)}
++select element {'order_priority':o_orderpriority,'count':COLL_COUNT(o)}
 +from  Orders as o,
 +      tpch.tmp() as t
 +where ((o.o_orderkey = t.o_orderkey) and (o.o_orderdate >= '1993-07-01') and (o.o_orderdate < '1993-10-01'))
 +group by o.o_orderpriority as o_orderpriority
 +order by o_orderpriority
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
index 150e4b1,0000000..9050001
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
@@@ -1,54 -1,0 +1,54 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'n_name':n_name,'revenue':revenue}
 +from  Customer as c,
 +      (
 +    select element {'n_name':l1.n_name,'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'s_nationkey':l1.s_nationkey,'o_custkey':o.o_custkey}
 +    from  Orders as o,
 +          (
 +        select element {'n_name':s1.n_name,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_orderkey':l.l_orderkey,'s_nationkey':s1.s_nationkey}
 +        from  LineItem as l,
 +              (
 +            select element {'n_name':n1.n_name,'s_suppkey':s.s_suppkey,'s_nationkey':s.s_nationkey}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_name':n.n_name,'n_nationkey':n.n_nationkey}
 +                from  Nation as n,
 +                      Region as r
 +                where (n.n_regionkey = r.r_regionkey)
 +            ) as n1
 +            where (s.s_nationkey = n1.n_nationkey)
 +        ) as s1
 +        where (l.l_suppkey = s1.s_suppkey)
 +    ) as l1
 +    where ((l1.l_orderkey = o.o_orderkey) and (o.o_orderdate >= '1990-01-01') and (o.o_orderdate < '1995-01-01'))
 +) as o1
 +where ((c.c_nationkey = o1.s_nationkey) and (c.c_custkey = o1.o_custkey))
 +/* +hash */
 +group by o1.n_name as n_name
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  o1 as i
 +  ))
 +order by revenue desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
index 9dee61f,0000000..94ee1cd
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- {'revenue':tpch.sum((
-     select element (l.l_extendedprice * l.l_discount)
-     from  LineItem as l
-     where ((l.l_shipdate >= '1994-01-01') and (l.l_shipdate < '1995-01-01') and (l.l_discount >= 0.05) and (l.l_discount <= 0.07) and (l.l_quantity < 24))
- ))};
++select sum(l.l_extendedprice * l.l_discount) as revenue
++from  LineItem as l
++where l.l_shipdate >= '1994-01-01' and l.l_shipdate < '1995-01-01' and l.l_discount >= 0.05
++  and l.l_discount <= 0.07 and l.l_quantity < 24
++;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
index 74d3b5a,0000000..f2838d7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function q7_volume_shipping_tmp() {
 +(
 +    select element {'supp_nation':n1.n_name,'cust_nation':n2.n_name,'s_nationkey':n1.n_nationkey,'c_nationkey':n2.n_nationkey}
 +    from  Nation as n1,
 +          Nation as n2
 +    where ((n2.n_name = 'GERMANY') or (n1.n_name = 'GERMANY'))
 +)
 +};
 +select element {'supp_nation':supp_nation,'cust_nation':cust_nation,'l_year':l_year,'revenue':revenue}
 +from  (
 +    select element {'l_shipdate':loc.l_shipdate,'l_extendedprice':loc.l_extendedprice,'l_discount':loc.l_discount,'c_nationkey':loc.c_nationkey,'s_nationkey':s.s_nationkey}
 +    from  (
 +        select element {'l_shipdate':lo.l_shipdate,'l_extendedprice':lo.l_extendedprice,'l_discount':lo.l_discount,'l_suppkey':lo.l_suppkey,'c_nationkey':c.c_nationkey}
 +        from  (
 +            select element {'l_shipdate':l.l_shipdate,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_suppkey':l.l_suppkey,'o_custkey':o.o_custkey}
 +            from  LineItem as l,
 +                  Orders as o
 +            where ((o.o_orderkey = l.l_orderkey) and (l.l_shipdate >= '1992-01-01') and (l.l_shipdate <= '1996-12-31'))
 +        ) as lo,
 +              Customer as c
 +        where (c.c_custkey = lo.o_custkey)
 +    ) as loc,
 +          Supplier as s
 +    where (s.s_suppkey = loc.l_suppkey)
 +) as locs,
 +      tpch.q7_volume_shipping_tmp() as t
 +with  l_year0 as tpch."get-year"(locs.l_shipdate)
 +where ((locs.c_nationkey = t.c_nationkey) and (locs.s_nationkey = t.s_nationkey))
 +group by t.supp_nation as supp_nation,t.cust_nation as cust_nation,l_year0 as l_year
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locs as i
 +  ))
 +order by supp_nation,cust_nation,l_year
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q08_national_market_share/q08_national_market_share.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q08_national_market_share/q08_national_market_share.3.query.sqlpp
index 77d3881,0000000..a221b8e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q08_national_market_share/q08_national_market_share.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q08_national_market_share/q08_national_market_share.3.query.sqlpp
@@@ -1,69 -1,0 +1,69 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'year':year,'mkt_share':(tpch.sum((
++select element {'year':year,'mkt_share':(tpch.coll_sum((
 +          select element tpch."switch-case"((i.s_name = 'BRAZIL'),true,i.revenue,false,0.0)
 +          from  t as i
-       )) / tpch.sum((
++      )) / tpch.coll_sum((
 +          select element i.revenue
 +          from  t as i
 +      )))}
 +from  (
 +    select element {'year':o_year,'revenue':(slnrcop.l_extendedprice * (1 - slnrcop.l_discount)),'s_name':n2.n_name}
 +    from  (
 +        select element {'o_orderdate':lnrcop.o_orderdate,'l_discount':lnrcop.l_discount,'l_extendedprice':lnrcop.l_extendedprice,'l_suppkey':lnrcop.l_suppkey,'s_nationkey':s.s_nationkey}
 +        from  Supplier as s,
 +              (
 +            select element {'o_orderdate':lnrco.o_orderdate,'l_discount':lnrco.l_discount,'l_extendedprice':lnrco.l_extendedprice,'l_suppkey':lnrco.l_suppkey}
 +            from  (
 +                select element {'o_orderdate':nrco.o_orderdate,'l_partkey':l.l_partkey,'l_discount':l.l_discount,'l_extendedprice':l.l_extendedprice,'l_suppkey':l.l_suppkey}
 +                from  LineItem as l,
 +                      (
 +                    select element {'o_orderdate':o.o_orderdate,'o_orderkey':o.o_orderkey}
 +                    from  Orders as o,
 +                          (
 +                        select element {'c_custkey':c.c_custkey}
 +                        from  Customer as c,
 +                              (
 +                            select element {'n_nationkey':n1.n_nationkey}
 +                            from  Nation as n1,
 +                                  Region as r1
 +                            where ((n1.n_regionkey = r1.r_regionkey) and (r1.r_name = 'AMERICA'))
 +                        ) as nr
 +                        where (c.c_nationkey = nr.n_nationkey)
 +                    ) as nrc
 +                    where (nrc.c_custkey = o.o_custkey)
 +                ) as nrco
 +                where ((l.l_orderkey = nrco.o_orderkey) and (nrco.o_orderdate >= '1995-01-01') and (nrco.o_orderdate < '1996-12-31'))
 +            ) as lnrco,
 +                  Part as p
 +            where ((p.p_partkey = lnrco.l_partkey) and (p.p_type = 'ECONOMY ANODIZED STEEL'))
 +        ) as lnrcop
 +        where (s.s_suppkey = lnrcop.l_suppkey)
 +    ) as slnrcop,
 +          Nation as n2
 +    with  o_year as tpch."get-year"(slnrcop.o_orderdate)
 +    where (slnrcop.s_nationkey = n2.n_nationkey)
 +) as t
 +group by t.year as year
 +order by year
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
index f7e2e8f,0000000..2ea88c9
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.sum((
++select element {'nation':nation,'o_year':o_year,'sum_profit':COLL_SUM((
 +        select element pr.amount
 +        from  profit as pr
 +    ))}
 +from  (
 +    select element {'nation':l3.n_name,'o_year':o_year,'amount':amount}
 +    from  Orders as o,
 +          (
 +        select element {'l_extendedprice':l2.l_extendedprice,'l_discount':l2.l_discount,'l_quantity':l2.l_quantity,'l_orderkey':l2.l_orderkey,'n_name':l2.n_name,'ps_supplycost':l2.ps_supplycost}
 +        from  Part as p,
 +              (
 +            select element {'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'l_quantity':l1.l_quantity,'l_partkey':l1.l_partkey,'l_orderkey':l1.l_orderkey,'n_name':l1.n_name,'ps_supplycost':ps.ps_supplycost}
 +            from  Partsupp as ps,
 +                  (
 +                select element {'l_suppkey':l.l_suppkey,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_quantity':l.l_quantity,'l_partkey':l.l_partkey,'l_orderkey':l.l_orderkey,'n_name':s1.n_name}
 +                from  (
 +                    select element {'s_suppkey':s.s_suppkey,'n_name':n.n_name}
 +                    from  Supplier as s,
 +                          Nation as n
 +                    where (n.n_nationkey = s.s_nationkey)
 +                ) as s1,
 +                      LineItem as l
 +                where (s1.s_suppkey = l.l_suppkey)
 +            ) as l1
 +            where ((ps.ps_suppkey = l1.l_suppkey) and (ps.ps_partkey = l1.l_partkey))
 +        ) as l2
 +        where (tpch.contains(p.p_name,'green') and (p.p_partkey = l2.l_partkey))
 +    ) as l3
 +    with  amount as ((l3.l_extendedprice * (1 - l3.l_discount)) - (l3.ps_supplycost * l3.l_quantity)),
 +          o_year as tpch."get-year"(o.o_orderdate)
 +    where (o.o_orderkey = l3.l_orderkey)
 +) as profit
 +group by profit.nation as nation,profit.o_year as o_year
 +order by nation,o_year desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item/q10_returned_item.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item/q10_returned_item.3.query.sqlpp
index f75030c,0000000..cce248c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item/q10_returned_item.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item/q10_returned_item.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'c_custkey':c_custkey,'c_name':c_name,'revenue':revenue,'c_acctbal':c_acctbal,'n_name':n_name,'c_address':c_address,'c_phone':c_phone,'c_comment':c_comment}
 +from  (
 +    select element {'c_custkey':ocn.c_custkey,'c_name':ocn.c_name,'c_acctbal':ocn.c_acctbal,'n_name':ocn.n_name,'c_address':ocn.c_address,'c_phone':ocn.c_phone,'c_comment':ocn.c_comment,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount}
 +    from  LineItem as l,
 +          (
 +        select element {'c_custkey':c.c_custkey,'c_name':c.c_name,'c_acctbal':c.c_acctbal,'n_name':n.n_name,'c_address':c.c_address,'c_phone':c.c_phone,'c_comment':c.c_comment,'o_orderkey':o.o_orderkey}
 +        from  Orders as o,
 +              Customer as c,
 +              Nation as n
 +        where (((c.c_custkey = o.o_custkey) and (o.o_orderdate >= '1993-10-01') and (o.o_orderdate < '1994-01-01')) and (c.c_nationkey = n.n_nationkey))
 +    ) as ocn
 +    where ((l.l_orderkey = ocn.o_orderkey) and (l.l_selectflag = 'R'))
 +) as locn
 +group by locn.c_custkey as c_custkey,locn.c_name as c_name,locn.c_acctbal as c_acctbal,locn.c_phone as c_phone,locn.n_name as n_name,locn.c_address as c_address,locn.c_comment as c_comment
- with  revenue as tpch.sum((
++with  revenue as COLL_SUM((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locn as i
 +  ))
 +order by revenue desc
 +limit 20
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
index f75030c,0000000..cce248c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'c_custkey':c_custkey,'c_name':c_name,'revenue':revenue,'c_acctbal':c_acctbal,'n_name':n_name,'c_address':c_address,'c_phone':c_phone,'c_comment':c_comment}
 +from  (
 +    select element {'c_custkey':ocn.c_custkey,'c_name':ocn.c_name,'c_acctbal':ocn.c_acctbal,'n_name':ocn.n_name,'c_address':ocn.c_address,'c_phone':ocn.c_phone,'c_comment':ocn.c_comment,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount}
 +    from  LineItem as l,
 +          (
 +        select element {'c_custkey':c.c_custkey,'c_name':c.c_name,'c_acctbal':c.c_acctbal,'n_name':n.n_name,'c_address':c.c_address,'c_phone':c.c_phone,'c_comment':c.c_comment,'o_orderkey':o.o_orderkey}
 +        from  Orders as o,
 +              Customer as c,
 +              Nation as n
 +        where (((c.c_custkey = o.o_custkey) and (o.o_orderdate >= '1993-10-01') and (o.o_orderdate < '1994-01-01')) and (c.c_nationkey = n.n_nationkey))
 +    ) as ocn
 +    where ((l.l_orderkey = ocn.o_orderkey) and (l.l_selectflag = 'R'))
 +) as locn
 +group by locn.c_custkey as c_custkey,locn.c_name as c_name,locn.c_acctbal as c_acctbal,locn.c_phone as c_phone,locn.n_name as n_name,locn.c_address as c_address,locn.c_comment as c_comment
- with  revenue as tpch.sum((
++with  revenue as COLL_SUM((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locn as i
 +  ))
 +order by revenue desc
 +limit 20
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q11_important_stock/q11_important_stock.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q11_important_stock/q11_important_stock.3.query.sqlpp
index ea3bd5a,0000000..073835c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q11_important_stock/q11_important_stock.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q11_important_stock/q11_important_stock.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- with  sum as tpch.sum((
++with  sum as tpch.coll_sum((
 +      select element (ps.ps_supplycost * ps.ps_availqty)
 +      from  Partsupp as ps,
 +            (
 +          select element {'s_suppkey':s.s_suppkey}
 +          from  Supplier as s,
 +                Nation as n
 +          where (s.s_nationkey = n.n_nationkey)
 +      ) as sn
 +      where (ps.ps_suppkey = sn.s_suppkey)
 +  ))
 +select element {'partkey':t1.ps_partkey,'part_value':t1.part_value}
 +from  (
-     select element {'ps_partkey':ps_partkey,'part_value':tpch.sum((
++    select element {'ps_partkey':ps_partkey,'part_value':tpch.coll_sum((
 +            select element (i.ps_supplycost * i.ps_availqty)
 +            from  ps as i
 +        ))}
 +    from  Partsupp as ps,
 +          (
 +        select element {'s_suppkey':s.s_suppkey}
 +        from  Supplier as s,
 +              Nation as n
 +        where (s.s_nationkey = n.n_nationkey)
 +    ) as sn
 +    where (ps.ps_suppkey = sn.s_suppkey)
 +    group by ps.ps_partkey as ps_partkey
 +) as t1
 +where (t1.part_value > (sum * 0.00001))
 +order by t1.part_value desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q12_shipping/q12_shipping.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q12_shipping/q12_shipping.3.query.sqlpp
index 61b685e,0000000..2bf33fc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q12_shipping/q12_shipping.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q12_shipping/q12_shipping.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'l_shipmode':l_shipmode,'high_line_count':tpch.sum((
++select element {'l_shipmode':l_shipmode,'high_line_count':tpch.coll_sum((
 +        select element tpch."switch-case"(((i.o_orderpriority = '1-URGENT') or (i.o_orderpriority = '2-HIGH')),true,1,false,0)
 +        from  o as i
-     )),'low_line_count':tpch.sum((
++    )),'low_line_count':tpch.coll_sum((
 +        select element tpch."switch-case"(((i.o_orderpriority = '1-URGENT') or (i.o_orderpriority = '2-HIGH')),true,0,false,1)
 +        from  o as i
 +    ))}
 +from  LineItem as l,
 +      Orders as o
 +where ((o.o_orderkey = l.l_orderkey) and (l.l_commitdate < l.l_receiptdate) and (l.l_shipdate < l.l_commitdate) and (l.l_receiptdate >= '1994-01-01') and (l.l_receiptdate < '1995-01-01') and ((l.l_shipmode = 'MAIL') or (l.l_shipmode = 'SHIP')))
 +group by l.l_shipmode as l_shipmode
 +order by l_shipmode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
index afa7c9b,0000000..d21e1f5
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
@@@ -1,44 -1,0 +1,44 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
 +select element {'c_count':c_count,'custdist':custdist}
 +from  (
-     select element {'c_custkey':c_custkey,'c_count':tpch.sum((
++    select element {'c_custkey':c_custkey,'c_count':tpch.coll_sum((
 +            select element i.o_orderkey_count
 +            from  co as i
 +        ))}
 +    from  (
-         select element {'c_custkey':c.c_custkey,'o_orderkey_count':tpch.count((
++        select element {'c_custkey':c.c_custkey,'o_orderkey_count':coll_count((
 +                select element o.o_orderkey
 +                from  Orders as o
 +                where ((c.c_custkey = o.o_custkey) and tpch.not(tpch.like(o.o_comment,'%special%requests%')))
 +            ))}
 +        from  Customer as c
 +    ) as co
 +    group by co.c_custkey as c_custkey
 +) as gco
 +group by gco.c_count as c_count
 +with  custdist as tpch.count(gco)
 +order by custdist desc,c_count desc
 +;



[33/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/distinct_aggregate.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/distinct_aggregate.sqlpp
index 877bb0f,0000000..6b95c9c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/distinct_aggregate.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/distinct_aggregate.sqlpp
@@@ -1,62 -1,0 +1,62 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
- create  nodegroup group1 if not exists  on 
++create  nodegroup group1 if not exists  on
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table LineItems_q1(LineItemType) primary key l_orderkey,l_linenumber on group1;
 +
 +write output to asterix_nc1:"rttest/tpch_q1_pricing_summary_report_nt.adm"
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_suppkey':tpch.count(g)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_suppkey':count(g)}
 +from  (
 +    select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'l_suppkey':l_suppkey}
 +    from  LineItems_q1 as l
 +    where (l.l_shipdate <= '1998-09-02')
 +    group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus,l.l_suppkey as l_suppkey
 +) as g
 +group by g.l_returnflag as l_returnflag,g.l_linestatus as l_linestatus
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase1.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase1.sqlpp
index 4e274e5,0000000..f1d5086
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase1.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase1.sqlpp
@@@ -1,65 -1,0 +1,65 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database rares03 if exists;
 +create  database rares03;
 +
 +use rares03;
 +
 +
 +create type rares03.UserType as
 +{
 +  uid : int32,
 +  name : string,
 +  lottery_numbers : {{int32}}
 +}
 +
 +create type rares03.VisitorType as
 +{
 +  vid : int32,
 +  name : string,
 +  lottery_numbers : {{int32}}
 +}
 +
 +create  nodegroup group1 if not exists  on 
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table Users(UserType) primary key uid on group1;
 +
 +create  table Visitors(VisitorType) primary key vid on group1;
 +
 +write output to asterix_nc1:"/tmp/rares03.adm"
 +select element {'uid':user.uid,'tokens':tokens}
 +from  Users as user
 +with  tokens as (
 +      select element i
 +      from  user.lottery_numbers as lottery_number,
 +            (
 +          select element item
 +          from  Users as user,
 +                user.lottery_numbers as lottery_number
 +          group by lottery_number as item
-           with  count as rares03.count(user)
++          with  count as count(user)
 +          order by count desc
 +      ) as token at i
 +      where (lottery_number = token)
 +      order by token
 +  )
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase2-with-hints.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase2-with-hints.sqlpp
index 7850337,0000000..c82f62c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase2-with-hints.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/fj-phase2-with-hints.sqlpp
@@@ -1,60 -1,0 +1,60 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database fuzzyjoin_078 if exists;
 +create  database fuzzyjoin_078;
 +
 +use fuzzyjoin_078;
 +
 +
 +create type fuzzyjoin_078.DBLPType as
 +{
 +  id : int32,
 +  dblpid : string,
 +  title : string,
 +  authors : string,
 +  misc : string
 +}
 +
 +create  nodegroup group1 if not exists  on 
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table DBLP_fuzzyjoin_078(DBLPType) primary key id on group1;
 +
 +write output to asterix_nc1:"rttest/fuzzyjoin_078.adm"
 +select element {'id':paperDBLP.id,'tokens':tokensDBLP}
 +from  DBLP_fuzzyjoin_078 as paperDBLP
 +with  unrankedTokensDBLP as fuzzyjoin_078."counthashed-word-tokens"(paperDBLP.title),
 +      tokensDBLP as (
 +      select element i
 +      from  unrankedTokensDBLP as token,
 +            (
 +          select element tokenGroupped
 +          from  DBLP_fuzzyjoin_078 as paper,
 +                fuzzyjoin_078."counthashed-word-tokens"(paper.title) as token
 +          /* +hash */
 +          group by token as tokenGroupped
-           order by fuzzyjoin_078.count(paper),tokenGroupped
++          order by count(paper),tokenGroupped
 +      ) as tokenRanked at i
 +      where (token = tokenRanked)
 +      order by i
 +  )
 +order by paperDBLP.id
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/inlined_q18_large_volume_customer.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/inlined_q18_large_volume_customer.sqlpp
index 10bf154,0000000..91680fe
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/inlined_q18_large_volume_customer.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/inlined_q18_large_volume_customer.sqlpp
@@@ -1,101 -1,0 +1,101 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database inlined_q18_large_volume_customer if exists;
 +create  database inlined_q18_large_volume_customer;
 +
 +use inlined_q18_large_volume_customer;
 +
 +
 +create type inlined_q18_large_volume_customer.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create type inlined_q18_large_volume_customer.OrderType as
 + closed {
 +  o_orderkey : int32,
 +  o_custkey : int32,
 +  o_orderstatus : string,
 +  o_totalprice : double,
 +  o_orderdate : string,
 +  o_orderpriority : string,
 +  o_clerk : string,
 +  o_shippriority : int32,
 +  o_comment : string
 +}
 +
 +create type inlined_q18_large_volume_customer.CustomerType as
 + closed {
 +  c_custkey : int32,
 +  c_name : string,
 +  c_address : string,
 +  c_nationkey : int32,
 +  c_phone : string,
 +  c_acctbal : double,
 +  c_mktsegment : string,
 +  c_comment : string
 +}
 +
 +create  nodegroup group1 if not exists  on 
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table LineItems(LineItemType) primary key l_orderkey,l_linenumber on group1;
 +
 +create  table Orders(OrderType) primary key o_orderkey on group1;
 +
 +create  table Customers(CustomerType) primary key c_custkey on group1;
 +
 +write output to asterix_nc1:"/tmp/inlined_q18_large_volume_customer.adm"
- select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':inlined_q18_large_volume_customer.sum((
++select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':inlined_q18_large_volume_customer.coll_sum((
 +        select element j.l_quantity
 +        from  l as j
 +    ))}
 +from  Customers as c,
 +      Orders as o,
 +      (
-     select element {'l_orderkey':l_orderkey,'t_sum_quantity':inlined_q18_large_volume_customer.sum((
++    select element {'l_orderkey':l_orderkey,'t_sum_quantity':inlined_q18_large_volume_customer.coll_sum((
 +            select element i.l_quantity
 +            from  l as i
 +        ))}
 +    from  LineItems as l
 +    group by l.l_orderkey as l_orderkey
 +) as t,
 +      LineItems as l
 +where ((c.c_custkey = o.o_custkey) and ((o.o_orderkey = t.l_orderkey) and (t.t_sum_quantity > 300)) and (l.l_orderkey = o.o_orderkey))
 +group by c.c_name as c_name,c.c_custkey as c_custkey,o.o_orderkey as o_orderkey,o.o_orderdate as o_orderdate,o.o_totalprice as o_totalprice
 +order by o_totalprice desc,o_orderdate
 +limit 100
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nest_aggregate.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nest_aggregate.sqlpp
index 2b67f09,0000000..80fb9bc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nest_aggregate.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/nest_aggregate.sqlpp
@@@ -1,137 -1,0 +1,137 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue782
 + * https://code.google.com/p/asterixdb/issues/detail?id=782
 + * Expected Res : SUCCESS
 + * Date         : 2nd Jun 2014
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.OrderType as
 + closed {
 +  o_orderkey : int32,
 +  o_custkey : int32,
 +  o_orderstatus : string,
 +  o_totalprice : double,
 +  o_orderdate : string,
 +  o_orderpriority : string,
 +  o_clerk : string,
 +  o_shippriority : int32,
 +  o_comment : string
 +}
 +
 +create type tpch.CustomerType as
 + closed {
 +  c_custkey : int32,
 +  c_name : string,
 +  c_address : string,
 +  c_nationkey : int32,
 +  c_phone : string,
 +  c_acctbal : double,
 +  c_mktsegment : string,
 +  c_comment : string
 +}
 +
 +create type tpch.SupplierType as
 + closed {
 +  s_suppkey : int32,
 +  s_name : string,
 +  s_address : string,
 +  s_nationkey : int32,
 +  s_phone : string,
 +  s_acctbal : double,
 +  s_comment : string
 +}
 +
 +create type tpch.NationType as
 + closed {
 +  n_nationkey : int32,
 +  n_name : string,
 +  n_regionkey : int32,
 +  n_comment : string
 +}
 +
 +create type tpch.RegionType as
 + closed {
 +  r_regionkey : int32,
 +  r_name : string,
 +  r_comment : string
 +}
 +
 +create type tpch.PartType as
 + closed {
 +  p_partkey : int32,
 +  p_name : string,
 +  p_mfgr : string,
 +  p_brand : string,
 +  p_type : string,
 +  p_size : int32,
 +  p_container : string,
 +  p_retailprice : double,
 +  p_comment : string
 +}
 +
 +create type tpch.PartSuppType as
 + closed {
 +  ps_partkey : int32,
 +  ps_suppkey : int32,
 +  ps_availqty : int32,
 +  ps_supplycost : double,
 +  ps_comment : string
 +}
 +
 +create  table Orders(OrderType) primary key o_orderkey;
 +
 +create  table Supplier(SupplierType) primary key s_suppkey;
 +
 +create  table Region(RegionType) primary key r_regionkey;
 +
 +create  table Nation(NationType) primary key n_nationkey;
 +
 +create  table Part(PartType) primary key p_partkey;
 +
 +create  table Partsupp(PartSuppType) primary key ps_partkey,ps_suppkey;
 +
 +create  table Customer(CustomerType) primary key c_custkey;
 +
 +create  table SelectedNation(NationType) primary key n_nationkey;
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element {'order_date':orderdate,'sum_price':sum}
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey = sn.n_nationkey)
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/orders-aggreg.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/orders-aggreg.sqlpp
index 4a6203a,0000000..c216c66
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/orders-aggreg.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/orders-aggreg.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database "orders-aggreg" if exists;
 +create  database "orders-aggreg";
 +
 +use "orders-aggreg";
 +
 +
 +create type "orders-aggreg".OrderType as
 + closed {
 +  oid : int32,
 +  cid : int32,
 +  orderstatus : string,
 +  orderpriority : string,
 +  clerk : string,
 +  total : float
 +}
 +
- create  nodegroup group1 if not exists  on 
++create  nodegroup group1 if not exists  on
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table Orders(OrderType) primary key oid on group1;
 +
 +write output to asterix_nc1:"/tmp/orders-aggreg.adm"
- select element {'cid':cid,'ordpercust':"orders-aggreg".count(o),'totalcust':"orders-aggreg".sum((
++select element {'cid':cid,'ordpercust':"orders-aggreg".coll_count(o),'totalcust':"orders-aggreg".coll_sum((
 +        select element i.total
 +        from  o as i
-     )),'avgcust':"orders-aggreg".avg((
++    )),'avgcust':"orders-aggreg".coll_avg((
 +        select element i.total
 +        from  o as i
 +    ))}
 +from  Orders as o
 +group by o.cid as cid
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q01_pricing_summary_report_nt.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q01_pricing_summary_report_nt.sqlpp
index c6c79b9,0000000..9642265
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q01_pricing_summary_report_nt.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q01_pricing_summary_report_nt.sqlpp
@@@ -1,78 -1,0 +1,78 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create  table LineItem(LineItemType) primary key l_orderkey,l_linenumber;
 +
 +load  table LineItem using localfs (("path"="asterix_nc1://data/tpch0.001/lineitem.tbl"),("format"="delimited-text"),("delimiter"="|")) pre-sorted;
 +
 +write output to asterix_nc1:"rttest/tpch_q1_pricing_summary_report_nt.adm"
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.sum((
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.coll_sum((
 +        select element i.l_quantity
 +        from  l as i
-     )),'sum_base_price':tpch.sum((
++    )),'sum_base_price':tpch.coll_sum((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'sum_disc_price':tpch.sum((
++    )),'sum_disc_price':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount))
 +        from  l as i
-     )),'sum_charge':tpch.sum((
++    )),'sum_charge':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +        from  l as i
-     )),'ave_qty':tpch.avg((
++    )),'ave_qty':tpch.coll_avg((
 +        select element i.l_quantity
 +        from  l as i
-     )),'ave_price':tpch.avg((
++    )),'ave_price':tpch.coll_avg((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'ave_disc':tpch.avg((
++    )),'ave_disc':tpch.coll_avg((
 +        select element i.l_discount
 +        from  l as i
-     )),'count_order':tpch.count(l)}
++    )),'count_order':tpch.coll_count(l)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q03_shipping_priority.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q03_shipping_priority.sqlpp
index 7c05feb,0000000..6980bdd
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q03_shipping_priority.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q03_shipping_priority.sqlpp
@@@ -1,95 -1,0 +1,95 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database q3_shipping_priority if exists;
 +create  database q3_shipping_priority;
 +
 +use q3_shipping_priority;
 +
 +
 +create type q3_shipping_priority.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create type q3_shipping_priority.OrderType as
 + closed {
 +  o_orderkey : int32,
 +  o_custkey : int32,
 +  o_orderstatus : string,
 +  o_totalprice : double,
 +  o_orderdate : string,
 +  o_orderpriority : string,
 +  o_clerk : string,
 +  o_shippriority : int32,
 +  o_comment : string
 +}
 +
 +create type q3_shipping_priority.CustomerType as
 + closed {
 +  c_custkey : int32,
 +  c_name : string,
 +  c_address : string,
 +  c_nationkey : int32,
 +  c_phone : string,
 +  c_acctbal : double,
 +  c_mktsegment : string,
 +  c_comment : string
 +}
 +
 +create  nodegroup group1 if not exists  on 
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table LineItems(LineItemType) primary key l_orderkey,l_linenumber on group1;
 +
 +create  table Orders(OrderType) primary key o_orderkey on group1;
 +
 +create  table Customers(CustomerType) primary key c_custkey on group1;
 +
 +write output to asterix_nc1:"/tmp/q3_shipping_priority.adm"
 +select element {'l_orderkey':l_orderkey,'revenue':revenue,'o_orderdate':o_orderdate,'o_shippriority':o_shippriority}
 +from  Customers as c,
 +      Orders as o,
 +      LineItems as l
 +where (((c.c_mktsegment = 'BUILDING') and (c.c_custkey = o.o_custkey)) and ((l.l_orderkey = o.o_orderkey) and (o.o_orderdate < '1995-03-15') and (l.l_shipdate > '1995-03-15')))
 +/* +hash */
 +group by l.l_orderkey as l_orderkey,o.o_orderdate as o_orderdate,o.o_shippriority as o_shippriority
- with  revenue as q3_shipping_priority.sum((
++with  revenue as q3_shipping_priority.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  l as i
 +  ))
 +order by revenue desc,o_orderdate
 +limit 10
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q05_local_supplier_volume.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q05_local_supplier_volume.sqlpp
index 58093e0,0000000..df9b375
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q05_local_supplier_volume.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q05_local_supplier_volume.sqlpp
@@@ -1,145 -1,0 +1,145 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database q5_local_supplier if exists;
 +create  database q5_local_supplier;
 +
 +use q5_local_supplier;
 +
 +
 +create type q5_local_supplier.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create type q5_local_supplier.OrderType as
 + closed {
 +  o_orderkey : int32,
 +  o_custkey : int32,
 +  o_orderstatus : string,
 +  o_totalprice : double,
 +  o_orderdate : string,
 +  o_orderpriority : string,
 +  o_clerk : string,
 +  o_shippriority : int32,
 +  o_comment : string
 +}
 +
 +create type q5_local_supplier.CustomerType as
 + closed {
 +  c_custkey : int32,
 +  c_name : string,
 +  c_address : string,
 +  c_nationkey : int32,
 +  c_phone : string,
 +  c_acctbal : double,
 +  c_mktsegment : string,
 +  c_comment : string
 +}
 +
 +create type q5_local_supplier.SupplierType as
 + closed {
 +  s_suppkey : int32,
 +  s_name : string,
 +  s_address : string,
 +  s_nationkey : int32,
 +  s_phone : string,
 +  s_acctbal : double,
 +  s_comment : string
 +}
 +
 +create type q5_local_supplier.NationType as
 + closed {
 +  n_nationkey : int32,
 +  n_name : string,
 +  n_regionkey : int32,
 +  n_comment : string
 +}
 +
 +create type q5_local_supplier.RegionType as
 + closed {
 +  r_regionkey : int32,
 +  r_name : string,
 +  r_comment : string
 +}
 +
 +create  nodegroup group1 if not exists  on 
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table LineItems(LineItemType) primary key l_orderkey,l_linenumber on group1;
 +
 +create  table Orders(OrderType) primary key o_orderkey on group1;
 +
 +create  table Customers(CustomerType) primary key c_custkey on group1;
 +
 +create  table Suppliers(SupplierType) primary key s_suppkey on group1;
 +
 +create  table Nations(NationType) primary key n_nationkey on group1;
 +
 +create  table Regions(RegionType) primary key r_regionkey on group1;
 +
 +write output to asterix_nc1:"/tmp/q5_local_supplier.adm"
 +select element {'n_name':n_name,'revenue':revenue}
 +from  Customers as c,
 +      (
 +    select element {'n_name':l1.n_name,'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'s_nationkey':l1.s_nationkey,'o_custkey':o.o_custkey}
 +    from  Orders as o,
 +          (
 +        select element {'n_name':s1.n_name,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_orderkey':l.l_orderkey,'s_nationkey':s1.s_nationkey}
 +        from  LineItems as l,
 +              (
 +            select element {'n_name':n1.n_name,'s_suppkey':s.s_suppkey,'s_nationkey':s.s_nationkey}
 +            from  Suppliers as s,
 +                  (
 +                select element {'n_name':n.n_name,'n_nationkey':n.n_nationkey}
 +                from  Nations as n,
 +                      Regions as r
 +                where ((n.n_regionkey = r.r_regionkey) and (r.r_name = 'ASIA'))
 +            ) as n1
 +            where (s.s_nationkey = n1.n_nationkey)
 +        ) as s1
 +        where (l.l_suppkey = s1.s_suppkey)
 +    ) as l1
 +    where ((l1.l_orderkey = o.o_orderkey) and (o.o_orderdate >= '1994-01-01') and (o.o_orderdate < '1995-01-01'))
 +) as o1
 +where ((c.c_nationkey = o1.s_nationkey) and (c.c_custkey = o1.o_custkey))
 +/* +hash */
 +group by o1.n_name as n_name
- with  revenue as q5_local_supplier.sum((
++with  revenue as q5_local_supplier.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  o1 as i
 +  ))
 +order by revenue desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q2.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q2.sqlpp
index 74a8f59,0000000..5472ad0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q2.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/q2.sqlpp
@@@ -1,73 -1,0 +1,73 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database events if exists;
 +create  database events;
 +
 +use events;
 +
 +
 +create type events.AddressType as
 + closed {
 +  street : string,
 +  city : string,
 +  zip : string
 +}
 +
 +create type events.EventType as
 + closed {
 +  name : string,
 +  location : AddressType?,
 +  organizers : {{{
 +          name : string,
 +          role : string
 +      }
 +}},
 +  sponsoring_sigs : [{
 +          sig_name : string,
 +          chapter_name : string
 +      }
 +],
 +  interest_keywords : {{string}},
 +  price : double?,
 +  start_time : datetime,
 +  end_time : datetime
 +}
 +
 +create  nodegroup group1 if not exists  on 
 +    asterix_nc1,
 +    asterix_nc2
 +;
 +create  table Event(EventType) primary key name on group1;
 +
 +write output to asterix_nc1:"/tmp/q2.adm"
 +select element {'sig_name':sig_name,'total_count':sig_sponsorship_count,'chapter_breakdown':by_chapter}
 +from  Event as event,
 +      event.sponsoring_sigs as sponsor
 +with  es as {'event':event,'sponsor':sponsor}
 +group by sponsor.sig_name as sig_name
- with  sig_sponsorship_count as events.count(es),
++with  sig_sponsorship_count as count(es),
 +      by_chapter as (
-       select element {'chapter_name':chapter_name,'escount':events.count(e)}
++      select element {'chapter_name':chapter_name,'escount':count(e)}
 +      from  es as e
 +      group by e.sponsor.chapter_name as chapter_name
 +  )
 +order by sig_sponsorship_count desc
 +limit 5
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue562.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue562.sqlpp
index 5fb9440,0000000..c87d0d0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue562.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue562.sqlpp
@@@ -1,107 -1,0 +1,107 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue562
 + * https://code.google.com/p/asterixdb/issues/detail?id=562
 + * Expected Res : SUCCESS
 + * Date         : 15th Jan. 2015
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : int32,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create type tpch.OrderType as
 + closed {
 +  o_orderkey : int32,
 +  o_custkey : int32,
 +  o_orderstatus : string,
 +  o_totalprice : double,
 +  o_orderdate : string,
 +  o_orderpriority : string,
 +  o_clerk : string,
 +  o_shippriority : int32,
 +  o_comment : string
 +}
 +
 +create type tpch.CustomerType as
 + closed {
 +  c_custkey : int32,
 +  c_name : string,
 +  c_address : string,
 +  c_nationkey : int32,
 +  c_phone : string,
 +  c_acctbal : double,
 +  c_mktsegment : string,
 +  c_comment : string
 +}
 +
 +create  table Orders(OrderType) primary key o_orderkey;
 +
 +create  table Customer(CustomerType) primary key c_custkey;
 +
 +declare function q22_customer_tmp() {
 +(
 +    select element {'c_acctbal':c.c_acctbal,'c_custkey':c.c_custkey,'cntrycode':phone_substr}
 +    from  Customer as c
 +    with  phone_substr as tpch.substring(c.c_phone,1,2)
 +    where ((phone_substr = '13') or (phone_substr = '31') or (phone_substr = '23') or (phone_substr = '29') or (phone_substr = '30') or (phone_substr = '18') or (phone_substr = '17'))
 +)
 +};
- with  avg as tpch.avg((
++with  avg as tpch.coll_avg((
 +      select element c.c_acctbal
 +      from  Customer as c
 +      with  phone_substr as tpch.substring(c.c_phone,1,2)
 +      where ((c.c_acctbal > 0.0) and ((phone_substr = '13') or (phone_substr = '31') or (phone_substr = '23') or (phone_substr = '29') or (phone_substr = '30') or (phone_substr = '18') or (phone_substr = '17')))
 +  ))
- select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.sum((
++select element {'cntrycode':cntrycode,'numcust':tpch.coll_count(ct),'totacctbal':tpch.coll_sum((
 +        select element i.c_acctbal
 +        from  ct as i
 +    ))}
 +from  tpch.q22_customer_tmp() as ct
- where (tpch.count((
++where (tpch.coll_count((
 +    select element o
 +    from  Orders as o
 +    where (ct.c_custkey = o.o_custkey)
 +)) = 0)
 +group by ct.cntrycode as cntrycode
 +order by cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue601.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue601.sqlpp
index 5f60eb0,0000000..1c68428
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue601.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue601.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue601
 + * https://code.google.com/p/asterixdb/issues/detail?id=601
 + * Expected Res : SUCCESS
 + * Date         : 10th Oct 2014
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create  table LineItem(LineItemType) primary key l_orderkey,l_linenumber;
 +
- select element {'l_linenumber':l_linenumber,'count_order':tpch.count(l)}
++select element {'l_linenumber':l_linenumber,'count_order':count(l)}
 +from  LineItem as l
 +group by l.l_linenumber as l_linenumber
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue697.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue697.sqlpp
index 5d23126,0000000..813109a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue697.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue697.sqlpp
@@@ -1,47 -1,0 +1,47 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description     : This test case is to verify the fix for issue697
 + * https://code.google.com/p/asterixdb/issues/detail?id=697
 + * Expected Result : Success
 + * Date            : 16th Nov. 2014
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 + closed {
 +  key1 : int32,
 +  "value" : int32
 +}
 +
 +create  table test(TestType) primary key key1;
 +
- select element {'gid':aid,'avg':test.avg((
++select element {'gid':aid,'avg':test.coll_avg((
 +        select element j."value"
 +        from  i as j
 +        where test.not(test."is-null"(j."value"))
 +    ))}
 +from  test as i
 +group by i.key1 as aid
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue785.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue785.sqlpp
index aecafb4,0000000..09bd754
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue785.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue785.sqlpp
@@@ -1,121 -1,0 +1,121 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue785
 + * https://code.google.com/p/asterixdb/issues/detail?id=785
 + * Expected Res : SUCCESS
 + * Date         : 2nd Oct. 2014
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.OrderType as
 + closed {
 +  o_orderkey : int32,
 +  o_custkey : int32,
 +  o_orderstatus : string,
 +  o_totalprice : double,
 +  o_orderdate : string,
 +  o_orderpriority : string,
 +  o_clerk : string,
 +  o_shippriority : int32,
 +  o_comment : string
 +}
 +
 +create type tpch.CustomerType as
 + closed {
 +  c_custkey : int32,
 +  c_name : string,
 +  c_address : string,
 +  c_nationkey : int32,
 +  c_phone : string,
 +  c_acctbal : double,
 +  c_mktsegment : string,
 +  c_comment : string
 +}
 +
 +create type tpch.SupplierType as
 + closed {
 +  s_suppkey : int32,
 +  s_name : string,
 +  s_address : string,
 +  s_nationkey : int32,
 +  s_phone : string,
 +  s_acctbal : double,
 +  s_comment : string
 +}
 +
 +create type tpch.NationType as
 + closed {
 +  n_nationkey : int32,
 +  n_name : string,
 +  n_regionkey : int32,
 +  n_comment : string
 +}
 +
 +create type tpch.RegionType as
 + closed {
 +  r_regionkey : int32,
 +  r_name : string,
 +  r_comment : string
 +}
 +
 +create  table Orders(OrderType) primary key o_orderkey;
 +
 +create  table Supplier(SupplierType) primary key s_suppkey;
 +
 +create  table Region(RegionType) primary key r_regionkey;
 +
 +create  table Nation(NationType) primary key n_nationkey;
 +
 +create  table Customer(CustomerType) primary key c_custkey;
 +
 +create  table SelectedNation(NationType) primary key n_nationkey;
 +
 +with  t as (
 +      select element {'n_nationkey':nation.n_nationkey,'n_name':nation.n_name}
 +      from  Nation as nation,
 +            SelectedNation as sn
 +      where (nation.n_nationkey = sn.n_nationkey)
 +  ),
 +      X as (
 +      select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':sum}
 +      from  t as n,
 +            Customer as customer,
 +            Orders as orders
 +      where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = n.n_nationkey))
 +      group by orders.o_orderdate as orderdate,n.n_nationkey as nation_key
-       with  sum as tpch.sum((
++      with  sum as tpch.coll_sum((
 +            select element o.o_totalprice
 +            from  orders as o
 +        ))
 +  )
 +select element {'nation_key':nation_key,'sum_price':(
 +        select element {'orderdate':y.order_date,'sum_price':y.sum_price}
 +        from  x as y
 +        order by y.sum_price desc
 +        limit 3
 +    )}
 +from  X as x
 +group by x.nation_key as nation_key
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810-2.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810-2.sqlpp
index b2e4121,0000000..16655ff
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810-2.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810-2.sqlpp
@@@ -1,78 -1,0 +1,78 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 24th Nov. 2014
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create  table LineItem(LineItemType) primary key l_orderkey,l_linenumber;
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheaps),'avg_expensive_discounts':tpch.avg(expensives),'sum_disc_prices':tpch.sum(disc_prices),'total_charges':tpch.sum(charges)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.coll_count(cheaps),'avg_expensive_discounts':tpch.coll_avg(expensives),'sum_disc_prices':tpch.coll_sum(disc_prices),'total_charges':tpch.coll_sum(charges)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  expensives as (
 +      select element i.l_discount
 +      from  l as i
 +      where (i.l_discount <= 0.05)
 +  ),
 +      cheaps as (
 +      select element i
 +      from  l as i
 +      where (i.l_discount > 0.05)
 +  ),
 +      charges as (
 +      select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +      from  l as i
 +  ),
 +      disc_prices as (
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  l as i
 +  )
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810.sqlpp
index da41e12,0000000..50760dc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue810.sqlpp
@@@ -1,70 -1,0 +1,70 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description     : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Result : Success
 + * Date            : 16th Nov. 2014
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create  table LineItem(LineItemType) primary key l_orderkey,l_linenumber;
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheap),'count_expensives':tpch.count(expensive)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.coll_count(cheap),'count_expensives':tpch.coll_count(expensive)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  cheap as (
 +      select element m
 +      from  l as m
 +      where (m.l_discount > 0.05)
 +  ),
 +      expensive as (
 +      select element a
 +      from  l as a
 +      where (a.l_discount <= 0.05)
 +  )
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue827-2.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue827-2.sqlpp
index c5ad063,0000000..f7c8ddf
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue827-2.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/query-issue827-2.sqlpp
@@@ -1,80 -1,0 +1,80 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue827
 + * https://code.google.com/p/asterixdb/issues/detail?id=827
 + * Expected Res : SUCCESS
 + * Date         : 3rd Dec. 2014
 + */
 +
 +drop  database tpch if exists;
 +create  database tpch;
 +
 +use tpch;
 +
 +
 +create type tpch.LineItemType as
 + closed {
 +  l_orderkey : int32,
 +  l_partkey : int32,
 +  l_suppkey : int32,
 +  l_linenumber : int32,
 +  l_quantity : double,
 +  l_extendedprice : double,
 +  l_discount : double,
 +  l_tax : double,
 +  l_returnflag : string,
 +  l_linestatus : string,
 +  l_shipdate : string,
 +  l_commitdate : string,
 +  l_receiptdate : string,
 +  l_shipinstruct : string,
 +  l_shipmode : string,
 +  l_comment : string
 +}
 +
 +create  table LineItem(LineItemType) primary key l_orderkey,l_linenumber;
 +
- {'sum_qty_partial':tpch.sum((
++{'sum_qty_partial':tpch.coll_sum((
 +    select element i.l_quantity
 +    from  LineItem as i
 +    where (i.l_shipdate <= '1998-09-02')
- )),'sum_base_price':tpch.sum((
++)),'sum_base_price':tpch.coll_sum((
 +    select element i.l_extendedprice
 +    from  LineItem as i
- )),'sum_disc_price':tpch.sum((
++)),'sum_disc_price':tpch.coll_sum((
 +    select element (i.l_extendedprice * (1 - i.l_discount))
 +    from  LineItem as i
- )),'sum_charge':tpch.sum((
++)),'sum_charge':tpch.coll_sum((
 +    select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +    from  LineItem as i
- )),'ave_qty':tpch.avg((
++)),'ave_qty':tpch.coll_avg((
 +    select element i.l_quantity
 +    from  LineItem as i
 +    where (i.l_shipdate <= '1998-09-02')
- )),'ave_price':tpch.avg((
++)),'ave_price':tpch.coll_avg((
 +    select element i.l_extendedprice
 +    from  LineItem as i
- )),'ave_disc':tpch.avg((
++)),'ave_disc':tpch.coll_avg((
 +    select element i.l_discount
 +    from  LineItem as i
- )),'count_order':tpch.count((
++)),'count_order':tpch.coll_count((
 +    select element l
 +    from  LineItem as l
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization-above-join.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization-above-join.sqlpp
index 07ccedc,0000000..94bcbf1
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization-above-join.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization-above-join.sqlpp
@@@ -1,82 -1,0 +1,82 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database fuzzyjoin if exists;
 +create  database fuzzyjoin;
 +
 +use fuzzyjoin;
 +
 +
 +create type fuzzyjoin.DBLPType as
 +{
 +  id : int32,
 +  dblpid : string,
 +  title : string,
 +  authors : string,
 +  misc : string
 +}
 +
 +create  table DBLP(DBLPType) primary key id;
 +
 +set "import-private-functions" "true";
 +
 +select element {'left':paperLeft,'right':paperRight,'sim':ridpair.sim}
 +from  DBLP as paperLeft,
 +      DBLP as paperRight,
 +      (
 +    select element {'idLeft':idLeft,'idRight':idRight,'sim':sim[0]}
 +    from  DBLP as paperLeft,
 +          fuzzyjoin."subset-collection"(tokensLeft,0,fuzzyjoin."prefix-len-jaccard"(lenLeft,0.500000f)) as prefixTokenLeft,
 +          DBLP as paperRight,
 +          fuzzyjoin."subset-collection"(tokensRight,0,fuzzyjoin."prefix-len-jaccard"(lenRight,0.500000f)) as prefixTokenRight
 +    with  lenLeft as fuzzyjoin.len(fuzzyjoin."counthashed-word-tokens"(paperLeft.title)),
 +          tokensLeft as (
 +          select element i
 +          from  fuzzyjoin."counthashed-word-tokens"(paperLeft.title) as tokenUnranked,
 +                (
 +              select element tokenGroupped
 +              from  DBLP as paper,
 +                    fuzzyjoin."counthashed-word-tokens"(paper.title) as token
 +              group by token as tokenGroupped
-               order by fuzzyjoin.count(paper),tokenGroupped
++              order by count(paper),tokenGroupped
 +          ) as tokenRanked at i
 +          where (tokenUnranked = tokenRanked)
 +          order by i
 +      ),
 +          lenRight as fuzzyjoin.len(fuzzyjoin."counthashed-word-tokens"(paperRight.title)),
 +          tokensRight as (
 +          select element i
 +          from  fuzzyjoin."counthashed-word-tokens"(paperRight.title) as tokenUnranked,
 +                (
 +              select element tokenGroupped
 +              from  DBLP as paper,
 +                    fuzzyjoin."counthashed-word-tokens"(paper.title) as token
 +              group by token as tokenGroupped
-               order by fuzzyjoin.count(paper),tokenGroupped
++              order by count(paper),tokenGroupped
 +          ) as tokenRanked at i
 +          where (tokenUnranked = tokenRanked)
 +          order by i
 +      ),
 +          sim as fuzzyjoin."similarity-jaccard-prefix"(lenLeft,tokensLeft,lenRight,tokensRight,prefixTokenLeft,0.500000f)
 +    where ((prefixTokenLeft = prefixTokenRight) and ((sim >= 0.500000f) and (paperLeft.id < paperRight.id)))
 +    group by paperLeft.id as idLeft,paperRight.id as idRight
 +) as ridpair
 +where ((ridpair.idLeft = paperLeft.id) and (ridpair.idRight = paperRight.id))
 +order by paperLeft.id,paperRight.id
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization.sqlpp
index 9f9d609,0000000..0cd244c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/optimizerts/queries_sqlpp/split-materialization.sqlpp
@@@ -1,60 -1,0 +1,60 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +drop  database TinySocial if exists;
 +create  database TinySocial;
 +
 +use TinySocial;
 +
 +
 +create type TinySocial.EmploymentType as
 +{
 +  "organization-name" : string,
 +  "start-date" : date,
 +  "end-date" : date?
 +}
 +
 +create type TinySocial.FacebookUserType as
 + closed {
 +  id : int32,
 +  alias : string,
 +  name : string,
 +  "user-since" : datetime,
 +  "friend-ids" : {{int32}},
 +  employment : [EmploymentType]
 +}
 +
 +create  table FacebookUsers(FacebookUserType) primary key id;
 +
 +with  lonelyusers as (
 +      select element d
 +      from  FacebookUsers as d
-       where (TinySocial.count(d."friend-ids") < 2)
++      where (TinySocial.coll_count(d."friend-ids") < 2)
 +  ),
 +      lonelyusers2 as (
 +      select element d
 +      from  FacebookUsers as d
-       where (TinySocial.count(d."friend-ids") < 2)
++      where (TinySocial.coll_count(d."friend-ids") < 2)
 +  )
 +select element {'user1':{'id':l1.id,'name':l1.name},'user2':{'id':l2.id,'name':l2.name}}
 +from  lonelyusers as l1,
 +      lonelyusers2 as l2
 +where (l1.id < l2.id)
 +order by l1.id,l2.id
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-indexing/rc-format/rc-format.1.ddl.aql
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries/external-indexing/rc-format/rc-format.1.ddl.aql
index 5a7294c,0000000..4ffc5a7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-indexing/rc-format/rc-format.1.ddl.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/external-indexing/rc-format/rc-format.1.ddl.aql
@@@ -1,41 -1,0 +1,46 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Create an external dataset that contains records stored with rc hdfs file format.
 +                 Build an index over the external dataset age attribute
 +                 Perform a query over the dataset using the index.
 +* Expected Res : Success
 +* Date         : 3rd Jan 2014
 +*/
 +drop dataverse test if exists;
 +create dataverse test;
 +
 +use dataverse test;
 +
 +create type EmployeeType as closed {
 + id: int64,
 + name: string,
 + age: int64
 +};
 +
 +create external dataset EmployeeDataset(EmployeeType)
 +using hdfs
- (("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/external-indexing-test.rc"),("input-format"="rc-input-format"),("format"="binary"),("parser"="hive-parser"),("hive-serde"="org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"));
++(("hdfs"="hdfs://127.0.0.1:31888"),
++("path"="/asterix/external-indexing-test.rc"),
++("input-format"="rc-input-format"),
++("format"="hdfs-writable"),
++("parser"="hive-parser"),
++("hive-serde"="org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"));
 +
 +create index EmployeeAgeIdx on EmployeeDataset(age);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
index 000ef5b,0000000..4e306b3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries/hdfs/large-record/large-record.1.ddl.aql
@@@ -1,45 -1,0 +1,45 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Create an external dataset that contains a very large record
 +                 stored with text hdfs file format.
 +                 Perform a query over the dataset.
 +* Expected Res : Success
 +* Date         : 3rd Jan 2016
 +*/
 +drop dataverse test if exists;
 +create dataverse test;
 +
 +use dataverse test;
 +
 +create type EmployeeType as closed {
 + id: int64,
 + name: string,
 + age: int64
 +};
 +
 +create external dataset EmployeeDataset(EmployeeType)
 +using adapter
 +(("reader"="hdfs"),
- ("parser"="delimited-text"),
++("format"="delimited-text"),
 +("hdfs"="hdfs://127.0.0.1:31888"),
 +("path"="/asterix/large-record.txt"),
 +("input-format"="text-input-format"),
 +("delimiter"="|"));
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null/agg_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null/agg_null.3.query.sqlpp
index 4529b3c,0000000..68c00f0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null/agg_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null/agg_null.3.query.sqlpp
@@@ -1,25 -1,0 +1,25 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over both ordered list and unordered list with only null items.
 +* Expected Res : Success
 +* Date         : Feb 7th 2014
 +*/
 +
- {'sql-count1':"sql-count"([null]),'average1':"sql-avg"([null]),'sql-sum1':"sql-sum"([null]),'sql-min1':"sql-min"([null]),'sql-max1':"sql-max"([null]),'sql-count2':"sql-count"({{null,null}}),'average2':"sql-avg"({{null,null}}),'sql-sum2':"sql-sum"({{null,null}}),'sql-min2':"sql-min"({{null,null}}),'sql-max2':"sql-max"({{null,null}})};
++{'sql-count1':"coll_sql-count"([null]),'average1':"coll_sql-avg"([null]),'sql-sum1':"coll_sql-sum"([null]),'sql-min1':"coll_sql-min"([null]),'sql-max1':"coll_sql-max"([null]),'sql-count2':"coll_sql-count"({{null,null}}),'average2':"coll_sql-avg"({{null,null}}),'sql-sum2':"coll_sql-sum"({{null,null}}),'sql-min2':"coll_sql-min"({{null,null}}),'sql-max2':"coll_sql-max"({{null,null}})};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec/agg_null_rec.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec/agg_null_rec.3.query.sqlpp
index f720a2c,0000000..83037b7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec/agg_null_rec.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec/agg_null_rec.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over records, with only null items for the aggregating fields.
 +* Expected Res : Success
 +* Date         : Feb 7th 2014
 +*/
 +
 +use test;
 +
 +
- {'sql-count':test."sql-count"((
++{'sql-count':test."coll_sql-count"((
 +    select element t.valplus
 +    from  Test as t
- )),'average':test."sql-avg"((
++)),'average':test."coll_sql-avg"((
 +    select element t.valplus
 +    from  Test as t
- )),'sql-sum':test."sql-sum"((
++)),'sql-sum':test."coll_sql-sum"((
 +    select element t.valplus
 +    from  Test as t
- )),'sql-min':test."sql-min"((
++)),'sql-min':test."coll_sql-min"((
 +    select element t.valplus
 +    from  Test as t
- )),'sql-max':test."sql-max"((
++)),'sql-max':test."coll_sql-max"((
 +    select element t.valplus
 +    from  Test as t
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
index bec4d87,0000000..0f32271
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over records, with only null items for the aggregating fields.
 +* Expected Res : Success
 +* Date         : Feb 7th 2014
 +*/
 +
 +use test;
 +
 +
- {'sql-count':test."sql-count"((
++{'sql-count':test."coll_sql-count"((
 +    select element t
 +    from  Test as t
- )),'average':test."sql-avg"((
++)),'average':test."coll_sql-avg"((
 +    select element i.val
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
- )),'sql-sum':test."sql-sum"((
++)),'sql-sum':test."coll_sql-sum"((
 +    select element i.val
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
- )),'sql-min':test."sql-min"((
++)),'sql-min':test."coll_sql-min"((
 +    select element i.valplus
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
- )),'sql-max':test."sql-max"((
++)),'sql-max':test."coll_sql-max"((
 +    select element i.valplus
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number/agg_number.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number/agg_number.3.query.sqlpp
index dc464a5,0000000..4f13bd4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number/agg_number.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number/agg_number.3.query.sqlpp
@@@ -1,25 -1,0 +1,25 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over an ordered list with numbers of different types
 +* Expected Res : Success
 +* Date         : Feb 7th 2014
 +*/
 +
- {'sql-count1':"sql-count"([float('2.0'),double('3.0'),93847382783847382,1]),'average1':"sql-avg"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-sum1':"sql-sum"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-min1':"sql-min"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-max1':"sql-max"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-count2':"sql-count"({{float('2.0'),double('3.0'),93847382783847382,1}}),'average2':"sql-avg"({{float('2.0'),double('3.0'),93847382783847382,1}}),'sql-sum2':"sql-sum"({{float('2.0'),double('3.0'),93847382783847382,1}}),'sql-min2':"sql-min"({{float('2.0'),double('3.0'),93847382783847382,1}}),'sql-max2':"sql-max"({{float('2.0'),double('3.0'),93847382783847382,1}})};
++{'sql-count1':"coll_sql-count"([float('2.0'),double('3.0'),93847382783847382,1]),'average1':"coll_sql-avg"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-sum1':"coll_sql-sum"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-min1':"coll_sql-min"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-max1':"coll_sql-max"([float('2.0'),double('3.0'),93847382783847382,1]),'sql-count2':"coll_sql-count"({{float('2.0'),double('3.0'),93847382783847382,1}}),'average2':"coll_sql-avg"({{float('2.0'),double('3.0'),93847382783847382,1}}),'sql-sum2':"coll_sql-sum"({{float('2.0'),double('3.0'),93847382783847382,1}}),'sql-min2':"coll_sql-min"({{float('2.0'),double('3.0'),93847382783847382,1}}),'sql-max2':"coll_sql-max"({{float('2.0'),double('3.0'),93847382783847382,1}})};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number_rec/agg_number_rec.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number_rec/agg_number_rec.3.query.sqlpp
index f57bba5,0000000..e93edc6
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number_rec/agg_number_rec.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/agg_number_rec/agg_number_rec.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over records, with different numeric typed items for the aggregating fields.
 +* Expected Res : Success
 +* Date         : Feb 7th 2014
 +*/
 +
 +use test;
 +
 +
- {'sql-count':test."sql-count"((
++{'sql-count':test."coll_sql-count"((
 +    select element t.valplus
 +    from  Test as t
- )),'average':test."sql-avg"((
++)),'average':test."coll_sql-avg"((
 +    select element t.valplus
 +    from  Test as t
- )),'sql-sum':test."sql-sum"((
++)),'sql-sum':test."coll_sql-sum"((
 +    select element t.valplus
 +    from  Test as t
- )),'sql-min':test."sql-min"((
++)),'sql-min':test."coll_sql-min"((
 +    select element t.valplus
 +    from  Test as t
- )),'sql-max':test."sql-max"((
++)),'sql-max':test."coll_sql-max"((
 +    select element t.valplus
 +    from  Test as t
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double/avg_double.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double/avg_double.3.query.sqlpp
index f6db917,0000000..0971e7d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double/avg_double.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double/avg_double.3.query.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- select element "sql-avg"((
++select element "coll_sql-avg"((
 +    select element x
 +    from  [1.0,2.0,double('3.0')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double_null/avg_double_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double_null/avg_double_null.3.query.sqlpp
index 9f70bcb,0000000..192c5c4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double_null/avg_double_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_double_null/avg_double_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test."sql-avg"((
++{'average':test."coll_sql-avg"((
 +    select element x.doubleField
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.1.ddl.sqlpp
index a71e263,0000000..dd878f9
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.1.ddl.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-avg aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-avg aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.2.update.sqlpp
index ddc1233,0000000..d5b1944
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-avg aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-avg aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +



[12/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java
index 851a7e0,0000000..faff9df
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/RecordWithMetaTest.java
@@@ -1,153 -1,0 +1,152 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.parser.test;
 +
 +import java.io.File;
 +import java.io.PrintStream;
 +import java.nio.file.Files;
++import java.nio.file.Path;
 +import java.nio.file.Paths;
 +import java.util.ArrayList;
 +import java.util.List;
 +
 +import org.apache.asterix.external.api.IRawRecord;
 +import org.apache.asterix.external.input.record.converter.CSVToRecordWithMetadataAndPKConverter;
 +import org.apache.asterix.external.input.record.reader.stream.QuotedLineRecordReader;
 +import org.apache.asterix.external.input.stream.LocalFSInputStream;
 +import org.apache.asterix.external.parser.ADMDataParser;
 +import org.apache.asterix.external.parser.RecordWithMetadataParser;
 +import org.apache.asterix.external.util.ExternalDataConstants;
++import org.apache.asterix.external.util.FileSystemWatcher;
 +import org.apache.asterix.formats.nontagged.AqlADMPrinterFactoryProvider;
 +import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.asterix.om.types.BuiltinType;
 +import org.apache.asterix.om.types.IAType;
 +import org.apache.commons.io.FileUtils;
 +import org.apache.hyracks.algebricks.data.IPrinter;
 +import org.apache.hyracks.algebricks.data.IPrinterFactory;
 +import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
- import org.apache.hyracks.api.io.FileReference;
 +import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
- import org.apache.hyracks.dataflow.std.file.FileSplit;
 +import org.junit.Assert;
 +import org.junit.Test;
 +
 +public class RecordWithMetaTest {
 +    private static ARecordType recordType;
 +
 +    @SuppressWarnings({ "unchecked", "rawtypes" })
 +    @Test
 +    public void runTest() throws Exception {
 +        File file = new File("target/beer.adm");
 +        File expected = new File(getClass().getResource("/results/beer.txt").toURI().getPath());
 +        try {
 +            FileUtils.deleteQuietly(file);
 +            PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
 +            // create key type
 +            IAType[] keyTypes = { BuiltinType.ASTRING };
 +            String keyName = "id";
 +            List<String> keyNameAsList = new ArrayList<>(1);
 +            keyNameAsList.add(keyName);
 +            // create record type
 +            String[] recordFieldNames = {};
 +            IAType[] recordFieldTypes = {};
 +            recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
 +            // create the meta type
 +            String[] metaFieldNames = { keyName, "flags", "expiration", "cas", "rev", "vbid", "dtype" };
 +            IAType[] metaFieldTypes = { BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT64,
 +                    BuiltinType.AINT32, BuiltinType.AINT32, BuiltinType.AINT32 };
 +            ARecordType metaType = new ARecordType("meta", metaFieldNames, metaFieldTypes, true);
 +            int valueIndex = 4;
 +            char delimiter = ',';
 +            int numOfTupleFields = 3;
 +            int[] pkIndexes = { 0 };
 +            int[] pkIndicators = { 1 };
 +
++            List<Path> paths = new ArrayList<>();
++            paths.add(Paths.get(getClass().getResource("/beer.csv").toURI()));
++            FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
 +            // create input stream
-             LocalFSInputStream inputStream = new LocalFSInputStream(
-                     new FileSplit[] { new FileSplit(null,
-                             new FileReference(Paths.get(getClass().getResource("/beer.csv").toURI()).toFile())) },
-                     null, null, 0, null, false);
- 
++            LocalFSInputStream inputStream = new LocalFSInputStream(watcher);
 +            // create reader record reader
-             QuotedLineRecordReader lineReader = new QuotedLineRecordReader(true, inputStream, null,
++            QuotedLineRecordReader lineReader = new QuotedLineRecordReader(true, inputStream,
 +                    ExternalDataConstants.DEFAULT_QUOTE);
 +            // create csv with json record reader
 +            CSVToRecordWithMetadataAndPKConverter recordConverter = new CSVToRecordWithMetadataAndPKConverter(
 +                    valueIndex, delimiter, metaType, recordType, pkIndicators, pkIndexes, keyTypes);
 +            // create the value parser <ADM in this case>
 +            ADMDataParser valueParser = new ADMDataParser(recordType, false);
 +            // create parser.
 +            RecordWithMetadataParser parser = new RecordWithMetadataParser(metaType, valueParser, recordConverter);
 +
 +            // create serializer deserializer and printer factories
 +            ISerializerDeserializer[] serdes = new ISerializerDeserializer[keyTypes.length + 2];
 +            IPrinterFactory[] printerFactories = new IPrinterFactory[keyTypes.length + 2];
 +            for (int i = 0; i < keyTypes.length; i++) {
 +                serdes[i + 2] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(keyTypes[i]);
 +                printerFactories[i + 2] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(keyTypes[i]);
 +            }
 +            serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
 +            serdes[1] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType);
 +            printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
 +            printerFactories[1] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(metaType);
 +            // create output descriptor 
 +            IPrinter[] printers = new IPrinter[printerFactories.length];
 +
 +            for (int i = 0; i < printerFactories.length; i++) {
 +                printers[i] = printerFactories[i].createPrinter();
 +            }
 +
 +            ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
 +            while (lineReader.hasNext()) {
 +                IRawRecord<char[]> record = lineReader.next();
 +                tb.reset();
 +                parser.parse(record, tb.getDataOutput());
 +                tb.addFieldEndOffset();
 +                parser.parseMeta(tb.getDataOutput());
 +                tb.addFieldEndOffset();
 +                parser.appendPK(tb);
 +                //print tuple
 +                printTuple(tb, printers, printStream);
 +
 +            }
 +            lineReader.close();
 +            printStream.close();
 +            Assert.assertTrue(FileUtils.contentEquals(file, expected));
 +        } catch (Throwable th) {
 +            System.err.println("TEST FAILED");
 +            th.printStackTrace();
 +            throw th;
 +        } finally {
 +            FileUtils.deleteQuietly(file);
 +        }
 +        System.err.println("TEST PASSED.");
 +    }
 +
 +    private void printTuple(ArrayTupleBuilder tb, IPrinter[] printers, PrintStream printStream)
 +            throws HyracksDataException {
 +        int[] offsets = tb.getFieldEndOffsets();
 +        for (int i = 0; i < printers.length; i++) {
 +            int offset = i == 0 ? 0 : offsets[i - 1];
 +            int length = i == 0 ? offsets[0] : offsets[i] - offsets[i - 1];
 +            printers[i].print(tb.getByteArray(), offset, length, printStream);
 +            printStream.println();
 +        }
 +    }
 +}


[48/50] [abbrv] incubator-asterixdb git commit: Merge remote-tracking branch 'hyracks-local/master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
index 8e39223,0000000..e4be66b
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
@@@ -1,368 -1,0 +1,379 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.hyracks.storage.am.lsm.common.impls;
 +
 +import java.util.List;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
 +import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 +import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.api.IndexException;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexInternal;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
 +import org.apache.hyracks.storage.am.lsm.common.api.ITwoPCIndex;
 +import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
 +
 +public class ExternalIndexHarness extends LSMHarness {
 +    private static final Logger LOGGER = Logger.getLogger(ExternalIndexHarness.class.getName());
 +
-     public ExternalIndexHarness(ILSMIndexInternal lsmIndex, ILSMMergePolicy mergePolicy,
-             ILSMOperationTracker opTracker, boolean replicationEnabled) {
++    public ExternalIndexHarness(ILSMIndexInternal lsmIndex, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
++            boolean replicationEnabled) {
 +        super(lsmIndex, mergePolicy, opTracker, replicationEnabled);
 +    }
 +
 +    @Override
 +    protected boolean getAndEnterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType,
 +            boolean isTryOperation) throws HyracksDataException {
++        validateOperationEnterComponentsState(ctx);
 +        synchronized (opTracker) {
 +            while (true) {
 +                lsmIndex.getOperationalComponents(ctx);
 +                // Before entering the components, prune those corner cases that indeed should not proceed.
 +                switch (opType) {
 +                    case MERGE:
 +                        if (ctx.getComponentHolder().size() < 2) {
 +                            // There is only a single component. There is nothing to merge.
 +                            return false;
 +                        }
 +                    default:
 +                        break;
 +                }
 +                if (enterComponents(ctx, opType)) {
 +                    return true;
 +                } else if (isTryOperation) {
 +                    return false;
 +                }
 +            }
 +        }
 +    }
 +
 +    @Override
 +    protected boolean enterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType)
 +            throws HyracksDataException {
++        validateOperationEnterComponentsState(ctx);
 +        List<ILSMComponent> components = ctx.getComponentHolder();
 +        int numEntered = 0;
 +        boolean entranceSuccessful = false;
 +        try {
 +            for (ILSMComponent c : components) {
 +                if (!c.threadEnter(opType, false)) {
 +                    break;
 +                }
 +                numEntered++;
 +            }
 +            entranceSuccessful = numEntered == components.size();
 +        } finally {
 +            if (!entranceSuccessful) {
 +                for (ILSMComponent c : components) {
 +                    if (numEntered == 0) {
 +                        break;
 +                    }
 +                    c.threadExit(opType, true, false);
 +                    numEntered--;
 +                }
 +                return false;
 +            }
++            ctx.setAccessingComponents(true);
 +        }
 +        // Check if there is any action that is needed to be taken based on the operation type
 +        switch (opType) {
 +            case MERGE:
 +                lsmIndex.getIOOperationCallback().beforeOperation(LSMOperationType.MERGE);
 +            default:
 +                break;
 +        }
 +        opTracker.beforeOperation(lsmIndex, opType, ctx.getSearchOperationCallback(), ctx.getModificationCallback());
 +        return true;
 +    }
 +
 +    private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMComponent newComponent,
 +            boolean failedOperation) throws HyracksDataException, IndexException {
++        /**
++         * FLUSH and MERGE operations should always exit the components
++         * to notify waiting threads.
++         */
++        if (!ctx.isAccessingComponents() && opType != LSMOperationType.FLUSH && opType != LSMOperationType.MERGE) {
++            return;
++        }
 +        synchronized (opTracker) {
 +            try {
 +                // First check if there is any action that is needed to be taken based on the state of each component.
 +                for (ILSMComponent c : ctx.getComponentHolder()) {
 +                    c.threadExit(opType, failedOperation, false);
 +                    switch (c.getState()) {
 +                        case INACTIVE:
 +                            if (replicationEnabled) {
 +                                componentsToBeReplicated.clear();
 +                                componentsToBeReplicated.add(c);
 +                                lsmIndex.scheduleReplication(null, componentsToBeReplicated, false,
 +                                        ReplicationOperation.DELETE, opType);
 +                            }
 +                            ((AbstractDiskLSMComponent) c).destroy();
 +                            break;
 +                        default:
 +                            break;
 +                    }
 +                }
++                ctx.setAccessingComponents(false);
 +                // Then, perform any action that is needed to be taken based on the operation type.
 +                switch (opType) {
 +                    case MERGE:
 +                        // newComponent is null if the merge op. was not performed.
 +                        if (newComponent != null) {
 +                            beforeSubsumeMergedComponents(newComponent, ctx.getComponentHolder());
 +                            lsmIndex.subsumeMergedComponents(newComponent, ctx.getComponentHolder());
 +                            if (replicationEnabled) {
 +                                componentsToBeReplicated.clear();
 +                                componentsToBeReplicated.add(newComponent);
 +                                triggerReplication(componentsToBeReplicated, false, opType);
 +                            }
 +                            mergePolicy.diskComponentAdded(lsmIndex, fullMergeIsRequested.get());
 +                        }
 +                        break;
 +                    default:
 +                        break;
 +                }
 +            } finally {
 +                opTracker.afterOperation(lsmIndex, opType, ctx.getSearchOperationCallback(),
 +                        ctx.getModificationCallback());
 +            }
 +        }
 +    }
 +
 +    @Override
-     public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException,
-             IndexException {
++    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple)
++            throws HyracksDataException, IndexException {
 +        throw new IndexException("2PC LSM Inedx doesn't support modify");
 +    }
 +
 +    @Override
 +    public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
 +            throws HyracksDataException, IndexException {
 +        throw new IndexException("2PC LSM Inedx doesn't support modify");
 +    }
 +
 +    @Override
 +    public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
 +            throws HyracksDataException, IndexException {
 +        LSMOperationType opType = LSMOperationType.SEARCH;
 +        getAndEnterComponents(ctx, opType, false);
 +        try {
 +            lsmIndex.search(ctx, cursor, pred);
 +        } catch (HyracksDataException | IndexException e) {
 +            exitComponents(ctx, opType, null, true);
 +            throw e;
 +        }
 +    }
 +
 +    @Override
 +    public void endSearch(ILSMIndexOperationContext ctx) throws HyracksDataException {
 +        if (ctx.getOperation() == IndexOperation.SEARCH) {
 +            try {
 +                exitComponents(ctx, LSMOperationType.SEARCH, null, false);
 +            } catch (IndexException e) {
 +                throw new HyracksDataException(e);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
 +            throws HyracksDataException, IndexException {
 +        if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
 +            callback.afterFinalize(LSMOperationType.MERGE, null);
 +            return;
 +        }
 +        lsmIndex.scheduleMerge(ctx, callback);
 +    }
 +
 +    @Override
 +    public void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
 +            throws HyracksDataException, IndexException {
 +        fullMergeIsRequested.set(true);
 +        if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
 +            // If the merge cannot be scheduled because there is already an ongoing merge on subset/all of the components, then
 +            // whenever the current merge has finished, it will schedule the full merge again.
 +            callback.afterFinalize(LSMOperationType.MERGE, null);
 +            return;
 +        }
 +        fullMergeIsRequested.set(false);
 +        lsmIndex.scheduleMerge(ctx, callback);
 +    }
 +
 +    @Override
-     public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-             IndexException {
++    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
++            throws HyracksDataException, IndexException {
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
 +        }
 +
 +        ILSMComponent newComponent = null;
 +        try {
 +            newComponent = lsmIndex.merge(operation);
 +            operation.getCallback().afterOperation(LSMOperationType.MERGE, ctx.getComponentHolder(), newComponent);
 +            lsmIndex.markAsValid(newComponent);
 +        } finally {
 +            exitComponents(ctx, LSMOperationType.MERGE, newComponent, false);
 +            operation.getCallback().afterFinalize(LSMOperationType.MERGE, newComponent);
 +        }
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Finished the merge operation for index: " + lsmIndex);
 +        }
 +    }
 +
 +    @Override
 +    public void addBulkLoadedComponent(ILSMComponent c) throws HyracksDataException, IndexException {
 +        lsmIndex.markAsValid(c);
 +        synchronized (opTracker) {
 +            lsmIndex.addComponent(c);
 +            if (replicationEnabled) {
 +                componentsToBeReplicated.clear();
 +                componentsToBeReplicated.add(c);
 +                triggerReplication(componentsToBeReplicated, true, LSMOperationType.MERGE);
 +            }
 +            // Enter the component
 +            enterComponent(c);
 +            mergePolicy.diskComponentAdded(lsmIndex, false);
 +        }
 +    }
 +
 +    // Three differences from  addBulkLoadedComponent
 +    // 1. this needs synchronization since others might be accessing the index (specifically merge operations that might change the lists of components)
 +    // 2. the actions taken by the index itself are different
 +    // 3. the component has already been marked valid by the bulk update operation
 +    public void addTransactionComponents(ILSMComponent newComponent) throws HyracksDataException, IndexException {
 +        ITwoPCIndex index = (ITwoPCIndex) lsmIndex;
 +        synchronized (opTracker) {
 +            List<ILSMComponent> newerList;
 +            List<ILSMComponent> olderList;
 +            if (index.getCurrentVersion() == 0) {
 +                newerList = index.getFirstComponentList();
 +                olderList = index.getSecondComponentList();
 +            } else {
 +                newerList = index.getSecondComponentList();
 +                olderList = index.getFirstComponentList();
 +            }
 +            // Exit components in old version of the index so they are ready to be
 +            // deleted if they are not needed anymore
 +            for (ILSMComponent c : olderList) {
 +                exitComponent(c);
 +            }
 +            // Enter components in the newer list
 +            for (ILSMComponent c : newerList) {
 +                enterComponent(c);
 +            }
 +            if (newComponent != null) {
 +                // Enter new component
 +                enterComponent(newComponent);
 +            }
 +            index.commitTransactionDiskComponent(newComponent);
 +            mergePolicy.diskComponentAdded(lsmIndex, fullMergeIsRequested.get());
 +        }
 +    }
 +
 +    @Override
 +    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
 +            throws HyracksDataException {
 +        callback.afterFinalize(LSMOperationType.FLUSH, null);
 +    }
 +
 +    @Override
-     public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-             IndexException {
++    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
++            throws HyracksDataException, IndexException {
 +    }
 +
 +    @Override
 +    public ILSMOperationTracker getOperationTracker() {
 +        return opTracker;
 +    }
 +
 +    public void beforeSubsumeMergedComponents(ILSMComponent newComponent, List<ILSMComponent> mergedComponents)
 +            throws HyracksDataException {
 +        ITwoPCIndex index = (ITwoPCIndex) lsmIndex;
 +        // check if merge will affect the first list
 +        if (index.getFirstComponentList().containsAll(mergedComponents)) {
 +            // exit un-needed components
 +            for (ILSMComponent c : mergedComponents) {
 +                exitComponent(c);
 +            }
 +            // enter new component
 +            enterComponent(newComponent);
 +        }
 +        // check if merge will affect the second list
 +        if (index.getSecondComponentList().containsAll(mergedComponents)) {
 +            // exit un-needed components
 +            for (ILSMComponent c : mergedComponents) {
 +                exitComponent(c);
 +            }
 +            // enter new component
 +            enterComponent(newComponent);
 +        }
 +    }
 +
 +    // The two methods: enterComponent and exitComponent are used to control
 +    // when components are to be deleted from disk
 +    private void enterComponent(ILSMComponent diskComponent) throws HyracksDataException {
 +        diskComponent.threadEnter(LSMOperationType.SEARCH, false);
 +    }
 +
 +    private void exitComponent(ILSMComponent diskComponent) throws HyracksDataException {
 +        diskComponent.threadExit(LSMOperationType.SEARCH, false, false);
 +        if (diskComponent.getState() == ILSMComponent.ComponentState.INACTIVE) {
 +            if (replicationEnabled) {
 +                componentsToBeReplicated.clear();
 +                componentsToBeReplicated.add(diskComponent);
 +                lsmIndex.scheduleReplication(null, componentsToBeReplicated, false, ReplicationOperation.DELETE, null);
 +            }
 +            ((AbstractDiskLSMComponent) diskComponent).destroy();
 +        }
 +    }
 +
 +    public void indexFirstTimeActivated() throws HyracksDataException {
 +        ITwoPCIndex index = (ITwoPCIndex) lsmIndex;
 +        // Enter disk components <-- To avoid deleting them when they are
 +        // still needed-->
 +        for (ILSMComponent c : index.getFirstComponentList()) {
 +            enterComponent(c);
 +        }
 +        for (ILSMComponent c : index.getSecondComponentList()) {
 +            enterComponent(c);
 +        }
 +    }
 +
 +    public void indexClear() throws HyracksDataException {
 +        ITwoPCIndex index = (ITwoPCIndex) lsmIndex;
 +        for (ILSMComponent c : index.getFirstComponentList()) {
 +            exitComponent(c);
 +        }
 +        for (ILSMComponent c : index.getSecondComponentList()) {
 +            exitComponent(c);
 +        }
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 21b0d8a,0000000..a19532f
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@@ -1,508 -1,0 +1,526 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.storage.am.lsm.common.impls;
 +
 +import java.util.ArrayList;
 +import java.util.LinkedList;
 +import java.util.List;
 +import java.util.concurrent.atomic.AtomicBoolean;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
 +import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 +import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.api.IndexException;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.LSMComponentType;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperation;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexAccessorInternal;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexInternal;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
 +import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
 +
 +public class LSMHarness implements ILSMHarness {
 +    private static final Logger LOGGER = Logger.getLogger(LSMHarness.class.getName());
 +
 +    protected final ILSMIndexInternal lsmIndex;
 +    protected final ILSMMergePolicy mergePolicy;
 +    protected final ILSMOperationTracker opTracker;
 +    protected final AtomicBoolean fullMergeIsRequested;
 +    protected final boolean replicationEnabled;
 +    protected List<ILSMComponent> componentsToBeReplicated;
 +
 +    public LSMHarness(ILSMIndexInternal lsmIndex, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
 +            boolean replicationEnabled) {
 +        this.lsmIndex = lsmIndex;
 +        this.opTracker = opTracker;
 +        this.mergePolicy = mergePolicy;
 +        fullMergeIsRequested = new AtomicBoolean();
-         this.replicationEnabled = replicationEnabled;
++        //only durable indexes are replicated
++        this.replicationEnabled = replicationEnabled && lsmIndex.isDurable();
 +        if (replicationEnabled) {
 +            this.componentsToBeReplicated = new ArrayList<ILSMComponent>();
 +        }
 +    }
 +
 +    protected boolean getAndEnterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType,
 +            boolean isTryOperation) throws HyracksDataException {
++        validateOperationEnterComponentsState(ctx);
 +        synchronized (opTracker) {
 +            while (true) {
 +                lsmIndex.getOperationalComponents(ctx);
 +                // Before entering the components, prune those corner cases that indeed should not proceed.
 +                switch (opType) {
 +                    case FLUSH:
 +                        ILSMComponent flushingComponent = ctx.getComponentHolder().get(0);
 +                        if (!((AbstractMemoryLSMComponent) flushingComponent).isModified()) {
 +                            //The mutable component has not been modified by any writer. There is nothing to flush.
 +                            //since the component is empty, set its state back to READABLE_WRITABLE
 +                            if (((AbstractLSMIndex) lsmIndex)
 +                                    .getCurrentMutableComponentState() == ComponentState.READABLE_UNWRITABLE) {
 +                                ((AbstractLSMIndex) lsmIndex)
 +                                        .setCurrentMutableComponentState(ComponentState.READABLE_WRITABLE);
 +                            }
 +                            return false;
 +                        }
 +                        if (((AbstractMemoryLSMComponent) flushingComponent).getWriterCount() > 0) {
 +                            /*
 +                             * This case is a case where even though FLUSH log was flushed to disk and scheduleFlush is triggered,
 +                             * the current in-memory component (whose state was changed to READABLE_WRITABLE (RW)
 +                             * from READABLE_UNWRITABLE(RU) before FLUSH log was written to log tail (which is memory buffer of log file)
 +                             * and then the state was changed back to RW (as shown in the following scenario)) can have writers
 +                             * based on the current code base/design.
 +                             * Thus, the writer count of the component may be greater than 0.
 +                             * if this happens, intead of throwing exception, scheduleFlush() deal with this situation by not flushing
 +                             * the component.
 +                             * Please see issue 884 for more detail information:
 +                             * https://code.google.com/p/asterixdb/issues/detail?id=884&q=owner%3Akisskys%40gmail.com&colspec=ID%20Type%20Status%20Priority%20Milestone%20Owner%20Summary%20ETA%20Severity
 +                             *
 +                             */
 +                            return false;
 +                        }
 +                        break;
 +                    case MERGE:
 +                        if (ctx.getComponentHolder().size() < 2) {
 +                            // There is only a single component. There is nothing to merge.
 +                            return false;
 +                        }
 +                    default:
 +                        break;
 +                }
 +                if (enterComponents(ctx, opType)) {
 +                    return true;
 +                } else if (isTryOperation) {
 +                    return false;
 +                }
 +                try {
 +                    // Flush and merge operations should never reach this wait call, because they are always try operations.
 +                    // If they fail to enter the components, then it means that there are an ongoing flush/merge operation on
 +                    // the same components, so they should not proceed.
 +                    opTracker.wait();
 +                } catch (InterruptedException e) {
 +                    throw new HyracksDataException(e);
 +                }
 +            }
 +        }
 +    }
 +
 +    protected boolean enterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType)
 +            throws HyracksDataException {
++        validateOperationEnterComponentsState(ctx);
 +        List<ILSMComponent> components = ctx.getComponentHolder();
 +        int numEntered = 0;
 +        boolean entranceSuccessful = false;
 +        try {
 +            for (ILSMComponent c : components) {
 +                boolean isMutableComponent = numEntered == 0 && c.getType() == LSMComponentType.MEMORY ? true : false;
 +                if (!c.threadEnter(opType, isMutableComponent)) {
 +                    break;
 +                }
 +                numEntered++;
 +            }
 +            entranceSuccessful = numEntered == components.size();
 +        } catch (Throwable e) {
 +            e.printStackTrace();
 +            throw e;
 +        } finally {
 +            if (!entranceSuccessful) {
 +                int i = 0;
 +                for (ILSMComponent c : components) {
 +                    if (numEntered == 0) {
 +                        break;
 +                    }
 +                    boolean isMutableComponent = i == 0 && c.getType() == LSMComponentType.MEMORY ? true : false;
 +                    c.threadExit(opType, true, isMutableComponent);
 +                    i++;
 +                    numEntered--;
 +                }
 +                return false;
 +            }
++            ctx.setAccessingComponents(true);
 +        }
 +        // Check if there is any action that is needed to be taken based on the operation type
 +        switch (opType) {
 +            case FLUSH:
 +                lsmIndex.getIOOperationCallback().beforeOperation(LSMOperationType.FLUSH);
 +                // Changing the flush status should *always* precede changing the mutable component.
 +                lsmIndex.changeFlushStatusForCurrentMutableCompoent(false);
 +                lsmIndex.changeMutableComponent();
 +                // Notify all waiting threads whenever a flush has been scheduled since they will check
 +                // again if they can grab and enter the mutable component.
 +                opTracker.notifyAll();
 +                break;
 +            case MERGE:
 +                lsmIndex.getIOOperationCallback().beforeOperation(LSMOperationType.MERGE);
 +            default:
 +                break;
 +        }
 +        opTracker.beforeOperation(lsmIndex, opType, ctx.getSearchOperationCallback(), ctx.getModificationCallback());
 +        return true;
 +    }
 +
 +    private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMComponent newComponent,
 +            boolean failedOperation) throws HyracksDataException, IndexException {
++        /**
++         * FLUSH and MERGE operations should always exit the components
++         * to notify waiting threads.
++         */
++        if (!ctx.isAccessingComponents() && opType != LSMOperationType.FLUSH && opType != LSMOperationType.MERGE) {
++            return;
++        }
 +        List<ILSMComponent> inactiveDiskComponents = null;
 +        List<ILSMComponent> inactiveDiskComponentsToBeDeleted = null;
 +        try {
 +            synchronized (opTracker) {
 +                try {
 +                    int i = 0;
 +                    // First check if there is any action that is needed to be taken based on the state of each component.
 +                    for (ILSMComponent c : ctx.getComponentHolder()) {
 +                        boolean isMutableComponent = i == 0 && c.getType() == LSMComponentType.MEMORY ? true : false;
 +                        c.threadExit(opType, failedOperation, isMutableComponent);
 +                        if (c.getType() == LSMComponentType.MEMORY) {
 +                            switch (c.getState()) {
 +                                case READABLE_UNWRITABLE:
 +                                    if (isMutableComponent && (opType == LSMOperationType.MODIFICATION
 +                                            || opType == LSMOperationType.FORCE_MODIFICATION)) {
 +                                        lsmIndex.changeFlushStatusForCurrentMutableCompoent(true);
 +                                    }
 +                                    break;
 +                                case INACTIVE:
 +                                    ((AbstractMemoryLSMComponent) c).reset();
 +                                    // Notify all waiting threads whenever the mutable component's has change to inactive. This is important because
 +                                    // even though we switched the mutable components, it is possible that the component that we just switched
 +                                    // to is still busy flushing its data to disk. Thus, the notification that was issued upon scheduling the flush
 +                                    // is not enough.
 +                                    opTracker.notifyAll();
 +                                    break;
 +                                default:
 +                                    break;
 +                            }
 +                        } else {
 +                            switch (c.getState()) {
 +                                case INACTIVE:
 +                                    lsmIndex.addInactiveDiskComponent(c);
 +                                    break;
 +                                default:
 +                                    break;
 +                            }
 +                        }
 +                        i++;
 +                    }
++                    ctx.setAccessingComponents(false);
 +                    // Then, perform any action that is needed to be taken based on the operation type.
 +                    switch (opType) {
 +                        case FLUSH:
 +                            // newComponent is null if the flush op. was not performed.
 +                            if (newComponent != null) {
 +                                lsmIndex.addComponent(newComponent);
 +                                if (replicationEnabled) {
 +                                    componentsToBeReplicated.clear();
 +                                    componentsToBeReplicated.add(newComponent);
 +                                    triggerReplication(componentsToBeReplicated, false, opType);
 +                                }
 +                                mergePolicy.diskComponentAdded(lsmIndex, false);
 +                            }
 +                            break;
 +                        case MERGE:
 +                            // newComponent is null if the merge op. was not performed.
 +                            if (newComponent != null) {
 +                                lsmIndex.subsumeMergedComponents(newComponent, ctx.getComponentHolder());
 +                                if (replicationEnabled) {
 +                                    componentsToBeReplicated.clear();
 +                                    componentsToBeReplicated.add(newComponent);
 +                                    triggerReplication(componentsToBeReplicated, false, opType);
 +                                }
 +                                mergePolicy.diskComponentAdded(lsmIndex, fullMergeIsRequested.get());
 +                            }
 +                            break;
 +                        default:
 +                            break;
 +                    }
 +                } catch (Throwable e) {
 +                    e.printStackTrace();
 +                    throw e;
 +                } finally {
 +                    if (failedOperation && (opType == LSMOperationType.MODIFICATION
 +                            || opType == LSMOperationType.FORCE_MODIFICATION)) {
 +                        //When the operation failed, completeOperation() method must be called
 +                        //in order to decrement active operation count which was incremented in beforeOperation() method.
 +                        opTracker.completeOperation(lsmIndex, opType, ctx.getSearchOperationCallback(),
 +                                ctx.getModificationCallback());
 +                    } else {
 +                        opTracker.afterOperation(lsmIndex, opType, ctx.getSearchOperationCallback(),
 +                                ctx.getModificationCallback());
 +                    }
 +
 +                    /*
 +                     * = Inactive disk components lazy cleanup if any =
 +                     * Prepare to cleanup inactive diskComponents which were old merged components
 +                     * and not anymore accessed.
 +                     * This cleanup is done outside of optracker synchronized block.
 +                     */
 +                    inactiveDiskComponents = lsmIndex.getInactiveDiskComponents();
 +                    if (!inactiveDiskComponents.isEmpty()) {
 +                        for (ILSMComponent inactiveComp : inactiveDiskComponents) {
 +                            if (((AbstractDiskLSMComponent) inactiveComp).getFileReferenceCount() == 1) {
 +                                if (inactiveDiskComponentsToBeDeleted == null) {
 +                                    inactiveDiskComponentsToBeDeleted = new LinkedList<ILSMComponent>();
 +                                }
 +                                inactiveDiskComponentsToBeDeleted.add(inactiveComp);
 +                            }
 +                        }
 +                        if (inactiveDiskComponentsToBeDeleted != null) {
 +                            inactiveDiskComponents.removeAll(inactiveDiskComponentsToBeDeleted);
 +                        }
 +                    }
 +                }
 +            }
 +        } finally {
 +            /*
 +             * cleanup inactive disk components if any
 +             */
 +            if (inactiveDiskComponentsToBeDeleted != null) {
 +                try {
 +                    //schedule a replication job to delete these inactive disk components from replicas
 +                    if (replicationEnabled) {
 +                        lsmIndex.scheduleReplication(null, inactiveDiskComponentsToBeDeleted, false,
 +                                ReplicationOperation.DELETE, opType);
 +                    }
 +
 +                    for (ILSMComponent c : inactiveDiskComponentsToBeDeleted) {
 +                        ((AbstractDiskLSMComponent) c).destroy();
 +                    }
 +                } catch (Throwable e) {
 +                    e.printStackTrace();
 +                    throw e;
 +                }
 +            }
 +        }
 +
 +    }
 +
 +    @Override
 +    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple)
 +            throws HyracksDataException, IndexException {
 +        LSMOperationType opType = LSMOperationType.FORCE_MODIFICATION;
 +        modify(ctx, false, tuple, opType);
 +    }
 +
 +    @Override
 +    public boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple)
 +            throws HyracksDataException, IndexException {
 +        LSMOperationType opType = LSMOperationType.MODIFICATION;
 +        return modify(ctx, tryOperation, tuple, opType);
 +    }
 +
 +    private boolean modify(ILSMIndexOperationContext ctx, boolean tryOperation, ITupleReference tuple,
 +            LSMOperationType opType) throws HyracksDataException, IndexException {
 +        if (!lsmIndex.isMemoryComponentsAllocated()) {
 +            lsmIndex.allocateMemoryComponents();
 +        }
 +        boolean failedOperation = false;
 +        if (!getAndEnterComponents(ctx, opType, tryOperation)) {
 +            return false;
 +        }
 +        try {
 +            lsmIndex.modify(ctx, tuple);
 +            // The mutable component is always in the first index.
 +            AbstractMemoryLSMComponent mutableComponent = (AbstractMemoryLSMComponent) ctx.getComponentHolder().get(0);
 +            mutableComponent.setIsModified();
 +        } catch (Exception e) {
 +            failedOperation = true;
 +            throw e;
 +        } finally {
 +            exitComponents(ctx, opType, null, failedOperation);
 +        }
 +        return true;
 +    }
 +
 +    @Override
 +    public void search(ILSMIndexOperationContext ctx, IIndexCursor cursor, ISearchPredicate pred)
 +            throws HyracksDataException, IndexException {
 +        LSMOperationType opType = LSMOperationType.SEARCH;
 +        ctx.setSearchPredicate(pred);
 +        getAndEnterComponents(ctx, opType, false);
 +        try {
 +            ctx.getSearchOperationCallback().before(pred.getLowKey());
 +            lsmIndex.search(ctx, cursor, pred);
 +        } catch (HyracksDataException | IndexException e) {
 +            exitComponents(ctx, opType, null, true);
 +            throw e;
 +        }
 +    }
 +
 +    @Override
 +    public void endSearch(ILSMIndexOperationContext ctx) throws HyracksDataException {
 +        if (ctx.getOperation() == IndexOperation.SEARCH) {
 +            try {
 +                exitComponents(ctx, LSMOperationType.SEARCH, null, false);
 +            } catch (IndexException e) {
 +                throw new HyracksDataException(e);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public void scheduleFlush(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
 +            throws HyracksDataException {
 +        if (!getAndEnterComponents(ctx, LSMOperationType.FLUSH, true)) {
 +            callback.afterFinalize(LSMOperationType.FLUSH, null);
 +            return;
 +        }
 +        lsmIndex.scheduleFlush(ctx, callback);
 +    }
 +
 +    @Override
 +    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
 +            throws HyracksDataException, IndexException {
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Started a flush operation for index: " + lsmIndex + " ...");
 +        }
 +
 +        ILSMComponent newComponent = null;
 +        try {
 +            newComponent = lsmIndex.flush(operation);
 +            operation.getCallback().afterOperation(LSMOperationType.FLUSH, null, newComponent);
 +            lsmIndex.markAsValid(newComponent);
 +        } catch (Throwable e) {
 +            e.printStackTrace();
 +            throw e;
 +        } finally {
 +            exitComponents(ctx, LSMOperationType.FLUSH, newComponent, false);
 +            operation.getCallback().afterFinalize(LSMOperationType.FLUSH, newComponent);
 +        }
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Finished the flush operation for index: " + lsmIndex);
 +        }
 +    }
 +
 +    @Override
 +    public void scheduleMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
 +            throws HyracksDataException, IndexException {
 +        if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
 +            callback.afterFinalize(LSMOperationType.MERGE, null);
 +            return;
 +        }
 +        lsmIndex.scheduleMerge(ctx, callback);
 +    }
 +
 +    @Override
 +    public void scheduleFullMerge(ILSMIndexOperationContext ctx, ILSMIOOperationCallback callback)
 +            throws HyracksDataException, IndexException {
 +        fullMergeIsRequested.set(true);
 +        if (!getAndEnterComponents(ctx, LSMOperationType.MERGE, true)) {
 +            // If the merge cannot be scheduled because there is already an ongoing merge on subset/all of the components, then
 +            // whenever the current merge has finished, it will schedule the full merge again.
 +            callback.afterFinalize(LSMOperationType.MERGE, null);
 +            return;
 +        }
 +        fullMergeIsRequested.set(false);
 +        lsmIndex.scheduleMerge(ctx, callback);
 +    }
 +
 +    @Override
 +    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
 +            throws HyracksDataException, IndexException {
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
 +        }
 +
 +        ILSMComponent newComponent = null;
 +        try {
 +            newComponent = lsmIndex.merge(operation);
 +            operation.getCallback().afterOperation(LSMOperationType.MERGE, ctx.getComponentHolder(), newComponent);
 +            lsmIndex.markAsValid(newComponent);
 +        } catch (Throwable e) {
 +            e.printStackTrace();
 +            throw e;
 +        } finally {
 +            exitComponents(ctx, LSMOperationType.MERGE, newComponent, false);
 +            operation.getCallback().afterFinalize(LSMOperationType.MERGE, newComponent);
 +        }
 +        if (LOGGER.isLoggable(Level.INFO)) {
 +            LOGGER.info("Finished the merge operation for index: " + lsmIndex);
 +        }
 +    }
 +
 +    @Override
 +    public void addBulkLoadedComponent(ILSMComponent c) throws HyracksDataException, IndexException {
 +        lsmIndex.markAsValid(c);
 +        synchronized (opTracker) {
 +            lsmIndex.addComponent(c);
 +            if (replicationEnabled) {
 +                componentsToBeReplicated.clear();
 +                componentsToBeReplicated.add(c);
 +                triggerReplication(componentsToBeReplicated, true, LSMOperationType.MERGE);
 +            }
 +            mergePolicy.diskComponentAdded(lsmIndex, false);
 +        }
 +    }
 +
 +    @Override
 +    public ILSMOperationTracker getOperationTracker() {
 +        return opTracker;
 +    }
 +
 +    protected void triggerReplication(List<ILSMComponent> lsmComponents, boolean bulkload, LSMOperationType opType)
 +            throws HyracksDataException {
 +        ILSMIndexAccessorInternal accessor = lsmIndex.createAccessor(NoOpOperationCallback.INSTANCE,
 +                NoOpOperationCallback.INSTANCE);
 +        accessor.scheduleReplication(lsmComponents, bulkload, opType);
 +    }
 +
 +    @Override
 +    public void scheduleReplication(ILSMIndexOperationContext ctx, List<ILSMComponent> lsmComponents, boolean bulkload,
 +            LSMOperationType opType) throws HyracksDataException {
 +
 +        //enter the LSM components to be replicated to prevent them from being deleted until they are replicated
 +        if (!getAndEnterComponents(ctx, LSMOperationType.REPLICATE, false)) {
 +            return;
 +        }
 +
 +        lsmIndex.scheduleReplication(ctx, lsmComponents, bulkload, ReplicationOperation.REPLICATE, opType);
 +    }
 +
 +    @Override
 +    public void endReplication(ILSMIndexOperationContext ctx) throws HyracksDataException {
 +        try {
 +            exitComponents(ctx, LSMOperationType.REPLICATE, null, false);
 +        } catch (IndexException e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
++
++    protected void validateOperationEnterComponentsState(ILSMIndexOperationContext ctx) throws HyracksDataException {
++        if (ctx.isAccessingComponents()) {
++            throw new HyracksDataException("Opeartion already has access to components of index " + lsmIndex);
++        }
++    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
index c4d2fcc,0000000..befdd85
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
@@@ -1,306 -1,0 +1,308 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.storage.am.lsm.common.impls;
 +
 +import java.util.Comparator;
 +import java.util.List;
 +import java.util.PriorityQueue;
 +
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
 +import org.apache.hyracks.storage.am.common.api.IIndexCursor;
 +import org.apache.hyracks.storage.am.common.api.ITreeIndexCursor;
 +import org.apache.hyracks.storage.am.common.api.IndexException;
 +import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMTreeTupleReference;
 +import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 +import org.apache.hyracks.storage.common.buffercache.ICachedPage;
 +
 +/**
 + * Base cursor over an LSM index: merges the per-component range cursors with a
 + * priority queue ordered by key (ties broken by cursor/component index), emitting
 + * each key once and optionally filtering out antimatter (deleted) tuples.
 + */
 +public abstract class LSMIndexSearchCursor implements ITreeIndexCursor {
 +    protected final ILSMIndexOperationContext opCtx;
 +    protected final boolean returnDeletedTuples;
 +    protected PriorityQueueElement outputElement;
 +    protected IIndexCursor[] rangeCursors;
 +    protected PriorityQueueElement[] pqes;
 +    protected PriorityQueue<PriorityQueueElement> outputPriorityQueue;
 +    protected PriorityQueueComparator pqCmp;
 +    protected MultiComparator cmp;
 +    protected boolean needPush;
 +    protected boolean includeMutableComponent;
 +    protected ILSMHarness lsmHarness;
 +
 +    protected List<ILSMComponent> operationalComponents;
 +
 +    public LSMIndexSearchCursor(ILSMIndexOperationContext opCtx, boolean returnDeletedTuples) {
 +        this.opCtx = opCtx;
 +        this.returnDeletedTuples = returnDeletedTuples;
 +        outputElement = null;
 +        needPush = false;
 +    }
 +
 +    public ILSMIndexOperationContext getOpCtx() {
 +        return opCtx;
 +    }
 +
 +    /**
 +     * (Re)builds the output priority queue from the current range cursors. Reuses
 +     * the existing PriorityQueueElement array when the cursor count is unchanged;
 +     * otherwise re-creates it (component count changes after flushes/merges).
 +     */
 +    public void initPriorityQueue() throws HyracksDataException, IndexException {
 +        int pqInitSize = (rangeCursors.length > 0) ? rangeCursors.length : 1;
 +        if (outputPriorityQueue == null) {
 +            outputPriorityQueue = new PriorityQueue<PriorityQueueElement>(pqInitSize, pqCmp);
 +            pqes = new PriorityQueueElement[pqInitSize];
 +            for (int i = 0; i < pqInitSize; i++) {
 +                pqes[i] = new PriorityQueueElement(i);
 +            }
 +            for (int i = 0; i < rangeCursors.length; i++) {
 +                pushIntoPriorityQueue(pqes[i]);
 +            }
 +        } else {
 +            outputPriorityQueue.clear();
 +            // did size change?
 +            if (pqInitSize == pqes.length) {
 +                // size is the same -> re-use
 +                for (int i = 0; i < rangeCursors.length; i++) {
 +                    pqes[i].reset(null);
 +                    pushIntoPriorityQueue(pqes[i]);
 +                }
 +            } else {
 +                // size changed (due to flushes, merges, etc) -> re-create
 +                pqes = new PriorityQueueElement[pqInitSize];
 +                for (int i = 0; i < rangeCursors.length; i++) {
 +                    pqes[i] = new PriorityQueueElement(i);
 +                    pushIntoPriorityQueue(pqes[i]);
 +                }
 +            }
 +        }
 +    }
 +
 +    public IIndexCursor getCursor(int cursorIndex) {
 +        return rangeCursors[cursorIndex];
 +    }
 +
 +    /**
 +     * Resets the cursor state and underlying range cursors; always ends the
 +     * search on the harness (releasing entered components) even if a reset fails.
 +     */
 +    @Override
 +    public void reset() throws HyracksDataException, IndexException {
 +        outputElement = null;
 +        needPush = false;
 +
 +        try {
 +            if (outputPriorityQueue != null) {
 +                outputPriorityQueue.clear();
 +            }
 +
 +            if (rangeCursors != null) {
 +                for (int i = 0; i < rangeCursors.length; i++) {
 +                    rangeCursors[i].reset();
 +                }
 +            }
 +            rangeCursors = null;
 +        } finally {
 +            if (lsmHarness != null) {
 +                lsmHarness.endSearch(opCtx);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public boolean hasNext() throws HyracksDataException, IndexException {
 +        checkPriorityQueue();
 +        return !outputPriorityQueue.isEmpty();
 +    }
 +
 +    @Override
 +    public void next() throws HyracksDataException {
 +        outputElement = outputPriorityQueue.poll();
 +        needPush = true;
 +    }
 +
 +    @Override
 +    public ICachedPage getPage() {
 +        // do nothing
 +        return null;
 +    }
 +
 +    @Override
 +    public void close() throws HyracksDataException {
 +        try {
-             outputPriorityQueue.clear();
++            if (outputPriorityQueue != null) {
++                outputPriorityQueue.clear();
++            }
 +            // NOTE(review): rangeCursors is not null-guarded here, unlike reset();
 +            // presumably close() is only called after open — TODO confirm.
 +            for (int i = 0; i < rangeCursors.length; i++) {
 +                rangeCursors[i].close();
 +            }
 +            rangeCursors = null;
 +        } finally {
 +            if (lsmHarness != null) {
 +                lsmHarness.endSearch(opCtx);
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public void setBufferCache(IBufferCache bufferCache) {
 +        // do nothing
 +    }
 +
 +    @Override
 +    public void setFileId(int fileId) {
 +        // do nothing
 +    }
 +
 +    @Override
 +    public ITupleReference getTuple() {
 +        return outputElement.getTuple();
 +    }
 +
 +    /**
 +     * Advances the element's cursor; if it has a next tuple, re-offers the element
 +     * to the queue and returns true, otherwise closes that cursor and returns false.
 +     */
 +    protected boolean pushIntoPriorityQueue(PriorityQueueElement e) throws HyracksDataException, IndexException {
 +        int cursorIndex = e.getCursorIndex();
 +        if (rangeCursors[cursorIndex].hasNext()) {
 +            rangeCursors[cursorIndex].next();
 +            e.reset(rangeCursors[cursorIndex].getTuple());
 +            outputPriorityQueue.offer(e);
 +            return true;
 +        }
 +        rangeCursors[cursorIndex].close();
 +        return false;
 +    }
 +
 +    protected boolean isDeleted(PriorityQueueElement checkElement) throws HyracksDataException, IndexException {
 +        return ((ILSMTreeTupleReference) checkElement.getTuple()).isAntimatter();
 +    }
 +
 +    /**
 +     * Normalizes the queue head so the next tuple returned is valid: skips
 +     * antimatter tuples (unless returnDeletedTuples), and discards duplicates of
 +     * the previously returned key from older components.
 +     */
 +    protected void checkPriorityQueue() throws HyracksDataException, IndexException {
 +        while (!outputPriorityQueue.isEmpty() || (needPush == true)) {
 +            if (!outputPriorityQueue.isEmpty()) {
 +                PriorityQueueElement checkElement = outputPriorityQueue.peek();
 +                // If there is no previous tuple or the previous tuple can be ignored
 +                if (outputElement == null) {
 +                    if (isDeleted(checkElement) && !returnDeletedTuples) {
 +                        // If the key has been deleted then pop it and set needPush to true.
 +                        // We cannot push immediately because the tuple may be
 +                        // modified if hasNext() is called
 +                        outputElement = outputPriorityQueue.poll();
 +                        needPush = true;
 +                    } else {
 +                        break;
 +                    }
 +                } else {
 +                    // Compare the previous tuple and the head tuple in the PQ
 +                    if (compare(cmp, outputElement.getTuple(), checkElement.getTuple()) == 0) {
 +                        // If the previous tuple and the head tuple are
 +                        // identical
 +                        // then pop the head tuple and push the next tuple from
 +                        // the tree of head tuple
 +
 +                        // the head element of PQ is useless now
 +                        PriorityQueueElement e = outputPriorityQueue.poll();
 +                        pushIntoPriorityQueue(e);
 +                    } else {
 +                        // If the previous tuple and the head tuple are different
 +                        // the info of previous tuple is useless
 +                        if (needPush == true) {
 +                            pushIntoPriorityQueue(outputElement);
 +                            needPush = false;
 +                        }
 +                        outputElement = null;
 +                    }
 +                }
 +            } else {
 +                // the priority queue is empty and needPush
 +                pushIntoPriorityQueue(outputElement);
 +                needPush = false;
 +                outputElement = null;
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public boolean exclusiveLatchNodes() {
 +        return false;
 +    }
 +
 +    /** Holder pairing a tuple with the index of the range cursor it came from. */
 +    public class PriorityQueueElement {
 +        private ITupleReference tuple;
 +        private int cursorIndex;
 +
 +        public PriorityQueueElement(int cursorIndex) {
 +            tuple = null;
 +            this.cursorIndex = cursorIndex;
 +        }
 +
 +        public ITupleReference getTuple() {
 +            return tuple;
 +        }
 +
 +        public int getCursorIndex() {
 +            return cursorIndex;
 +        }
 +
 +        public void reset(ITupleReference tuple) {
 +            this.tuple = tuple;
 +        }
 +    }
 +
 +    /**
 +     * Orders queue elements by key, breaking ties by cursor index so newer
 +     * components win.
 +     * NOTE(review): on equal keys AND equal cursor indices this returns -1, which
 +     * violates the Comparator contract (compare(a, a) != 0); presumably two
 +     * elements never share a cursor index in the queue — TODO confirm.
 +     */
 +    public class PriorityQueueComparator implements Comparator<PriorityQueueElement> {
 +
 +        protected MultiComparator cmp;
 +
 +        public PriorityQueueComparator(MultiComparator cmp) {
 +            this.cmp = cmp;
 +        }
 +
 +        @Override
 +        public int compare(PriorityQueueElement elementA, PriorityQueueElement elementB) {
 +            int result;
 +            try {
 +                result = cmp.compare(elementA.getTuple(), elementB.getTuple());
 +                if (result != 0) {
 +                    return result;
 +                }
 +            } catch (HyracksDataException e) {
 +                throw new IllegalArgumentException(e);
 +            }
 +
 +            if (elementA.getCursorIndex() > elementB.getCursorIndex()) {
 +                return 1;
 +            } else {
 +                return -1;
 +            }
 +        }
 +
 +        public MultiComparator getMultiComparator() {
 +            return cmp;
 +        }
 +    }
 +
 +    protected void setPriorityQueueComparator() {
 +        if (pqCmp == null || cmp != pqCmp.getMultiComparator()) {
 +            pqCmp = new PriorityQueueComparator(cmp);
 +        }
 +    }
 +
 +    protected int compare(MultiComparator cmp, ITupleReference tupleA, ITupleReference tupleB)
 +            throws HyracksDataException {
 +        return cmp.compare(tupleA, tupleB);
 +    }
 +
 +    @Override
 +    public void markCurrentTupleAsUpdated() throws HyracksDataException {
 +        throw new HyracksDataException("Updating tuples is not supported with this cursor.");
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
index c511a67,0000000..828e296
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
@@@ -1,169 -1,0 +1,170 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.storage.am.lsm.invertedindex.impls;
 +
 +import java.util.LinkedList;
 +import java.util.List;
 +
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.storage.am.common.api.IIndexAccessor;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 +import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
- import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
++import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 +import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
 +
- public class LSMInvertedIndexOpContext implements ILSMIndexOperationContext {
++/**
++ * Operation context for an LSM inverted index: holds per-operation state
++ * (callbacks, component lists, filter tuples) and pre-created accessors for the
++ * in-memory inverted indexes and their deleted-keys BTrees.
++ */
++public class LSMInvertedIndexOpContext extends AbstractLSMIndexOperationContext {
 +
 +    private static final int NUM_DOCUMENT_FIELDS = 1;
 +
 +    private IndexOperation op;
 +    private final List<ILSMComponent> componentHolder;
 +    private final List<ILSMComponent> componentsToBeMerged;
 +    private final List<ILSMComponent> componentsToBeReplicated;
 +
 +    private IModificationOperationCallback modificationCallback;
 +    private ISearchOperationCallback searchCallback;
 +
 +    // Tuple that only has the inverted-index elements (aka keys), projecting away the document fields.
 +    public PermutingTupleReference keysOnlyTuple;
 +
 +    // Accessor to the in-memory inverted indexes.
 +    public IInvertedIndexAccessor[] mutableInvIndexAccessors;
 +    // Accessor to the deleted-keys BTrees.
 +    public IIndexAccessor[] deletedKeysBTreeAccessors;
 +
 +    public IInvertedIndexAccessor currentMutableInvIndexAccessors;
 +    public IIndexAccessor currentDeletedKeysBTreeAccessors;
 +
 +    public final PermutingTupleReference indexTuple;
 +    public final MultiComparator filterCmp;
 +    public final PermutingTupleReference filterTuple;
 +
 +    public ISearchPredicate searchPredicate;
 +
 +    public LSMInvertedIndexOpContext(List<ILSMComponent> mutableComponents,
 +            IModificationOperationCallback modificationCallback, ISearchOperationCallback searchCallback,
 +            int[] invertedIndexFields, int[] filterFields) throws HyracksDataException {
 +        this.componentHolder = new LinkedList<ILSMComponent>();
 +        this.componentsToBeMerged = new LinkedList<ILSMComponent>();
 +        this.componentsToBeReplicated = new LinkedList<ILSMComponent>();
 +        this.modificationCallback = modificationCallback;
 +        this.searchCallback = searchCallback;
 +
 +        // One accessor pair per mutable (in-memory) component.
 +        mutableInvIndexAccessors = new IInvertedIndexAccessor[mutableComponents.size()];
 +        deletedKeysBTreeAccessors = new IIndexAccessor[mutableComponents.size()];
 +
 +        for (int i = 0; i < mutableComponents.size(); i++) {
 +            LSMInvertedIndexMemoryComponent mutableComponent = (LSMInvertedIndexMemoryComponent) mutableComponents
 +                    .get(i);
 +            mutableInvIndexAccessors[i] = (IInvertedIndexAccessor) mutableComponent.getInvIndex()
 +                    .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
 +            deletedKeysBTreeAccessors[i] = mutableComponent.getDeletedKeysBTree()
 +                    .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
 +        }
 +
 +        assert mutableComponents.size() > 0;
 +
 +        // Project away the document fields, leaving only the key fields.
 +        LSMInvertedIndexMemoryComponent c = (LSMInvertedIndexMemoryComponent) mutableComponents.get(0);
 +        int numKeyFields = c.getInvIndex().getInvListTypeTraits().length;
 +        int[] keyFieldPermutation = new int[numKeyFields];
 +        for (int i = 0; i < numKeyFields; i++) {
 +            keyFieldPermutation[i] = NUM_DOCUMENT_FIELDS + i;
 +        }
 +        keysOnlyTuple = new PermutingTupleReference(keyFieldPermutation);
 +
 +        if (filterFields != null) {
 +            indexTuple = new PermutingTupleReference(invertedIndexFields);
 +            filterCmp = MultiComparator.create(c.getLSMComponentFilter().getFilterCmpFactories());
 +            filterTuple = new PermutingTupleReference(filterFields);
 +        } else {
 +            indexTuple = null;
 +            filterCmp = null;
 +            filterTuple = null;
 +        }
 +    }
 +
 +    @Override
 +    public void reset() {
++        super.reset();
 +        componentHolder.clear();
 +        componentsToBeMerged.clear();
 +        componentsToBeReplicated.clear();
 +    }
 +
 +    @Override
 +    // TODO: Ignore opcallback for now.
 +    public void setOperation(IndexOperation newOp) throws HyracksDataException {
 +        reset();
 +        op = newOp;
 +    }
 +
 +    @Override
 +    public IndexOperation getOperation() {
 +        return op;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentHolder() {
 +        return componentHolder;
 +    }
 +
 +    @Override
 +    public ISearchOperationCallback getSearchOperationCallback() {
 +        return searchCallback;
 +    }
 +
 +    @Override
 +    public IModificationOperationCallback getModificationCallback() {
 +        return modificationCallback;
 +    }
 +
 +    @Override
 +    public void setCurrentMutableComponentId(int currentMutableComponentId) {
 +        // Switch the "current" accessors to those of the active mutable component.
 +        currentMutableInvIndexAccessors = mutableInvIndexAccessors[currentMutableComponentId];
 +        currentDeletedKeysBTreeAccessors = deletedKeysBTreeAccessors[currentMutableComponentId];
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeMerged() {
 +        return componentsToBeMerged;
 +    }
 +
 +    @Override
 +    public void setSearchPredicate(ISearchPredicate searchPredicate) {
 +        this.searchPredicate = searchPredicate;
 +    }
 +
 +    @Override
 +    public ISearchPredicate getSearchPredicate() {
 +        return searchPredicate;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeReplicated() {
 +        return componentsToBeReplicated;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
index 6a9a640,0000000..358a42a
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
@@@ -1,134 -1,0 +1,135 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.hyracks.storage.am.lsm.rtree.impls;
 +
 +import java.util.LinkedList;
 +import java.util.List;
 +
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
- import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
++import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 +
- public class ExternalRTreeOpContext implements ILSMIndexOperationContext {
++/**
++ * Operation context for an external (disk-only) LSM R-tree. Because the index
++ * has no mutable component, modification callbacks and mutable-component
++ * switching are no-ops; the context mainly carries search state and comparators.
++ */
++public class ExternalRTreeOpContext extends AbstractLSMIndexOperationContext {
 +    private IndexOperation op;
 +    private MultiComparator bTreeCmp;
 +    private MultiComparator rTreeCmp;
 +    public final List<ILSMComponent> componentHolder;
 +    private final List<ILSMComponent> componentsToBeMerged;
 +    private final List<ILSMComponent> componentsToBeReplicated;
 +    public final ISearchOperationCallback searchCallback;
 +    private final int targetIndexVersion;
 +    public ISearchPredicate searchPredicate;
 +    public LSMRTreeCursorInitialState initialState;
 +
 +    public ExternalRTreeOpContext(IBinaryComparatorFactory[] rtreeCmpFactories,
 +            IBinaryComparatorFactory[] btreeCmpFactories, ISearchOperationCallback searchCallback,
 +            int targetIndexVersion, ILSMHarness lsmHarness, int[] comparatorFields,
 +            IBinaryComparatorFactory[] linearizerArray, ITreeIndexFrameFactory rtreeLeafFrameFactory,
 +            ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory) {
 +        this.componentHolder = new LinkedList<ILSMComponent>();
 +        this.componentsToBeMerged = new LinkedList<ILSMComponent>();
 +        this.componentsToBeReplicated = new LinkedList<ILSMComponent>();
 +        this.searchCallback = searchCallback;
 +        this.targetIndexVersion = targetIndexVersion;
 +        this.bTreeCmp = MultiComparator.create(btreeCmpFactories);
 +        this.rTreeCmp = MultiComparator.create(rtreeCmpFactories);
 +        // Pre-built initial state shared by search cursors over this context.
 +        initialState = new LSMRTreeCursorInitialState(rtreeLeafFrameFactory, rtreeInteriorFrameFactory,
 +                btreeLeafFrameFactory, bTreeCmp, lsmHarness, comparatorFields, linearizerArray, searchCallback,
 +                componentHolder);
 +    }
 +
 +    @Override
 +    public void setOperation(IndexOperation newOp) {
 +        reset();
 +        this.op = newOp;
 +    }
 +
 +    @Override
 +    public void setCurrentMutableComponentId(int currentMutableComponentId) {
 +        // Do nothing. this should never be called for disk only indexes
 +    }
 +
 +    @Override
 +    public void reset() {
++        super.reset();
 +        componentHolder.clear();
 +        componentsToBeMerged.clear();
 +        componentsToBeReplicated.clear();
 +    }
 +
 +    @Override
 +    public IndexOperation getOperation() {
 +        return op;
 +    }
 +
 +    public MultiComparator getBTreeMultiComparator() {
 +        return bTreeCmp;
 +    }
 +
 +    public MultiComparator getRTreeMultiComparator() {
 +        return rTreeCmp;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentHolder() {
 +        return componentHolder;
 +    }
 +
 +    @Override
 +    public ISearchOperationCallback getSearchOperationCallback() {
 +        return searchCallback;
 +    }
 +
 +    // This should never be needed for disk only indexes
 +    @Override
 +    public IModificationOperationCallback getModificationCallback() {
 +        return null;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeMerged() {
 +        return componentsToBeMerged;
 +    }
 +
 +    public int getTargetIndexVersion() {
 +        return targetIndexVersion;
 +    }
 +
 +    @Override
 +    public void setSearchPredicate(ISearchPredicate searchPredicate) {
 +        this.searchPredicate = searchPredicate;
 +    }
 +
 +    @Override
 +    public ISearchPredicate getSearchPredicate() {
 +        return searchPredicate;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeReplicated() {
 +        return componentsToBeReplicated;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
index 686cd2b,0000000..62f572f
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
@@@ -1,181 -1,0 +1,182 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.storage.am.lsm.rtree.impls;
 +
 +import java.util.LinkedList;
 +import java.util.List;
 +
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.storage.am.btree.impls.BTree;
 +import org.apache.hyracks.storage.am.btree.impls.BTreeOpContext;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 +import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
- import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
++import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 +import org.apache.hyracks.storage.am.rtree.impls.RTree;
 +import org.apache.hyracks.storage.am.rtree.impls.RTreeOpContext;
 +
- public final class LSMRTreeOpContext implements ILSMIndexOperationContext {
++public final class LSMRTreeOpContext extends AbstractLSMIndexOperationContext {
 +
 +    public RTree.RTreeAccessor[] mutableRTreeAccessors;
 +    public RTree.RTreeAccessor currentMutableRTreeAccessor;
 +    public BTree.BTreeAccessor[] mutableBTreeAccessors;
 +    public BTree.BTreeAccessor currentMutableBTreeAccessor;
 +
 +    public RTreeOpContext[] rtreeOpContexts;
 +    public BTreeOpContext[] btreeOpContexts;
 +    public RTreeOpContext currentRTreeOpContext;
 +    public BTreeOpContext currentBTreeOpContext;
 +
 +    private IndexOperation op;
 +    public final List<ILSMComponent> componentHolder;
 +    private final List<ILSMComponent> componentsToBeMerged;
 +    private final List<ILSMComponent> componentsToBeReplicated;
 +    private IModificationOperationCallback modificationCallback;
 +    private ISearchOperationCallback searchCallback;
 +    public final PermutingTupleReference indexTuple;
 +    public final MultiComparator filterCmp;
 +    public final PermutingTupleReference filterTuple;
 +    public ISearchPredicate searchPredicate;
 +    public LSMRTreeCursorInitialState searchInitialState;
 +
 +    public LSMRTreeOpContext(List<ILSMComponent> mutableComponents, ITreeIndexFrameFactory rtreeLeafFrameFactory,
 +            ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
 +            ITreeIndexFrameFactory btreeInteriorFrameFactory, IBinaryComparatorFactory[] rtreeCmpFactories,
 +            IBinaryComparatorFactory[] btreeCmpFactories, IModificationOperationCallback modificationCallback,
 +            ISearchOperationCallback searchCallback, int[] rtreeFields, int[] filterFields, ILSMHarness lsmHarness,
 +            int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray) {
 +        mutableRTreeAccessors = new RTree.RTreeAccessor[mutableComponents.size()];
 +        mutableBTreeAccessors = new BTree.BTreeAccessor[mutableComponents.size()];
 +        rtreeOpContexts = new RTreeOpContext[mutableComponents.size()];
 +        btreeOpContexts = new BTreeOpContext[mutableComponents.size()];
 +
 +        LSMRTreeMemoryComponent c = (LSMRTreeMemoryComponent) mutableComponents.get(0);
 +
 +        for (int i = 0; i < mutableComponents.size(); i++) {
 +            LSMRTreeMemoryComponent mutableComponent = (LSMRTreeMemoryComponent) mutableComponents.get(i);
 +            mutableRTreeAccessors[i] = (RTree.RTreeAccessor) mutableComponent.getRTree()
 +                    .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
 +            mutableBTreeAccessors[i] = (BTree.BTreeAccessor) mutableComponent.getBTree()
 +                    .createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
 +
 +            rtreeOpContexts[i] = mutableRTreeAccessors[i].getOpContext();
 +            btreeOpContexts[i] = mutableBTreeAccessors[i].getOpContext();
 +        }
 +
 +        assert mutableComponents.size() > 0;
 +        currentRTreeOpContext = rtreeOpContexts[0];
 +        currentBTreeOpContext = btreeOpContexts[0];
 +        this.componentHolder = new LinkedList<ILSMComponent>();
 +        this.componentsToBeMerged = new LinkedList<ILSMComponent>();
 +        this.componentsToBeReplicated = new LinkedList<ILSMComponent>();
 +        this.modificationCallback = modificationCallback;
 +        this.searchCallback = searchCallback;
 +
 +        if (filterFields != null) {
 +            indexTuple = new PermutingTupleReference(rtreeFields);
 +            filterCmp = MultiComparator.create(c.getLSMComponentFilter().getFilterCmpFactories());
 +            filterTuple = new PermutingTupleReference(filterFields);
 +        } else {
 +            indexTuple = null;
 +            filterCmp = null;
 +            filterTuple = null;
 +        }
 +        searchInitialState = new LSMRTreeCursorInitialState(rtreeLeafFrameFactory, rtreeInteriorFrameFactory,
 +                btreeLeafFrameFactory, getBTreeMultiComparator(), lsmHarness, comparatorFields, linearizerArray,
 +                searchCallback, componentHolder);
 +    }
 +
 +    @Override
 +    public void setOperation(IndexOperation newOp) {
 +        reset();
 +        this.op = newOp;
 +    }
 +
 +    @Override
 +    public void setCurrentMutableComponentId(int currentMutableComponentId) {
 +        currentMutableRTreeAccessor = mutableRTreeAccessors[currentMutableComponentId];
 +        currentMutableBTreeAccessor = mutableBTreeAccessors[currentMutableComponentId];
 +        currentRTreeOpContext = rtreeOpContexts[currentMutableComponentId];
 +        currentBTreeOpContext = btreeOpContexts[currentMutableComponentId];
 +        if (op == IndexOperation.INSERT) {
 +            currentRTreeOpContext.setOperation(op);
 +        } else if (op == IndexOperation.DELETE) {
 +            currentBTreeOpContext.setOperation(IndexOperation.INSERT);
 +        }
 +    }
 +
 +    @Override
 +    public void reset() {
++        super.reset();
 +        componentHolder.clear();
 +        componentsToBeMerged.clear();
 +    }
 +
 +    @Override
 +    public IndexOperation getOperation() {
 +        return op;
 +    }
 +
 +    public MultiComparator getBTreeMultiComparator() {
 +        return currentBTreeOpContext.cmp;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentHolder() {
 +        return componentHolder;
 +    }
 +
 +    @Override
 +    public ISearchOperationCallback getSearchOperationCallback() {
 +        return searchCallback;
 +    }
 +
 +    @Override
 +    public IModificationOperationCallback getModificationCallback() {
 +        return modificationCallback;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeMerged() {
 +        return componentsToBeMerged;
 +    }
 +
 +    @Override
 +    public void setSearchPredicate(ISearchPredicate searchPredicate) {
 +        this.searchPredicate = searchPredicate;
 +    }
 +
 +    @Override
 +    public ISearchPredicate getSearchPredicate() {
 +        return searchPredicate;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeReplicated() {
 +        return componentsToBeReplicated;
 +    }
 +}


[30/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
index b101b78,0000000..5030566
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum with an empty list.
++ * Description    : Tests the scalar version of coll_sql-sum with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"([]);
++select element test."coll_sql-sum"([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.1.ddl.sqlpp
index a7d4b35,0000000..a661a36
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum with nulls.
++ * Description    : Tests the scalar version of coll_sql-sum with nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.2.update.sqlpp
index 980a38e,0000000..f05913c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum with nulls.
++ * Description    : Tests the scalar version of coll_sql-sum with nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.3.query.sqlpp
index 4bd7894,0000000..90020e5
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_null/scalar_sum_null.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum with nulls.
++ * Description    : Tests the scalar version of coll_sql-sum with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-sum"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test."sql-sum"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test."sql-sum"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test."sql-sum"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test."sql-sum"([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test."sql-sum"([test.double('1'),test.double('2'),test.double('3'),null])
++with  i8 as test."coll_sql-sum"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test."coll_sql-sum"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test."coll_sql-sum"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test."coll_sql-sum"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test."coll_sql-sum"([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test."coll_sql-sum"([test.double('1'),test.double('2'),test.double('3'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double/sum_double.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double/sum_double.3.query.sqlpp
index 6bb46d1,0000000..498337c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double/sum_double.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double/sum_double.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x
 +    from  [1.0,2.0,3.0] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double_null/sum_double_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double_null/sum_double_null.3.query.sqlpp
index 30945ab,0000000..a4c12fc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double_null/sum_double_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_double_null/sum_double_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x.doubleField
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.1.ddl.sqlpp
index 5559c89,0000000..cc6f313
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.1.ddl.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-sum aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-sum aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.2.update.sqlpp
index 7f09a49,0000000..f0eaeca
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-sum aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-sum aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.3.query.sqlpp
index b20de6a,0000000..90ab5e5
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_01/sum_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-sum aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-sum aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.1.ddl.sqlpp
index 6316d09,0000000..f3134a2
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.1.ddl.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-sum aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-sum aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 + closed {
 +  id : int64,
 +  val : double
 +}
 +
 +create  table Test(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.2.update.sqlpp
index f19da12,0000000..2ae8290
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-sum aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-sum aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.3.query.sqlpp
index 737f23f,0000000..0e02b18
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_empty_02/sum_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-sum aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-sum aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float/sum_float.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float/sum_float.3.query.sqlpp
index f84c73b,0000000..bd66bc0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float/sum_float.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float/sum_float.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x
 +    from  [test.float('1'),test.float('2'),test.float('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float_null/sum_float_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float_null/sum_float_null.3.query.sqlpp
index 367f251,0000000..50e1170
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float_null/sum_float_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_float_null/sum_float_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x.floatField
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16/sum_int16.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16/sum_int16.3.query.sqlpp
index 317daf0,0000000..5e334c0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16/sum_int16.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16/sum_int16.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x
 +    from  [test.int16('1'),test.int16('2'),test.int16('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16_null/sum_int16_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16_null/sum_int16_null.3.query.sqlpp
index 6f269f3,0000000..57b9a1a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16_null/sum_int16_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int16_null/sum_int16_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x.int16Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32/sum_int32.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32/sum_int32.3.query.sqlpp
index 7f85f31,0000000..54b403d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32/sum_int32.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32/sum_int32.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x
 +    from  [test.int32('1'),test.int32('2'),test.int32('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32_null/sum_int32_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32_null/sum_int32_null.3.query.sqlpp
index 8339998,0000000..ffcd051
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32_null/sum_int32_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int32_null/sum_int32_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x.int32Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64/sum_int64.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64/sum_int64.3.query.sqlpp
index 57afbfc,0000000..c797118
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64/sum_int64.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64/sum_int64.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x
 +    from  [test.int64('1'),test.int64('2'),test.int64('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64_null/sum_int64_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64_null/sum_int64_null.3.query.sqlpp
index c555e24,0000000..2a6e4b0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64_null/sum_int64_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int64_null/sum_int64_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x.int64Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8/sum_int8.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8/sum_int8.3.query.sqlpp
index 046bdd0,0000000..2bcfc28
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8/sum_int8.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8/sum_int8.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x
 +    from  [test.int8('1'),test.int8('2'),test.int8('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8_null/sum_int8_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8_null/sum_int8_null.3.query.sqlpp
index d2414d9,0000000..327ecfa
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8_null/sum_int8_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_int8_null/sum_int8_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element x.int8Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.1.ddl.sqlpp
index 24bdd1c,0000000..bf05e6b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.1.ddl.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
- * Description  : Run sql-sum over an ordered list with mixed types
++* Description  : Run coll_sql-sum over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Feb 7th 2014
 +*/
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.3.query.sqlpp
index d05df5c,0000000..76c5b86
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_mixed/sum_mixed.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
- * Description  : Run sql-sum over an ordered list with mixed types
++* Description  : Run coll_sql-sum over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Feb 7th 2014
 +*/
 +
- select element "sql-sum"((
++select element "coll_sql-sum"((
 +    select element x
 +    from  [float('2.0'),'hello world',93847382783847382,date('2013-01-01')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
index 3442fd4,0000000..810ce8e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
@@@ -1,39 -1,0 +1,39 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   sql-sum() aggregate function must return the numeric sql-sum, when non null values are given as input to sql-sum().
-  *                  :   Get the sql-sum for those tuples which are non null for salary fields.
++ * Description      :   sql-coll_sum() aggregate function must return the numeric coll_sql-sum, when non null values are given as input to sql-coll_sum().
++ *                  :   Get the coll_sql-sum for those tuples which are non null for salary fields.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 +{
 +  id : int64,
 +  sal : int64?
 +}
 +
 +create  table tdst(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
index 5d723c8,0000000..a62c168
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   sql-sum() aggregate function must return the numeric sql-sum, when non null values are given as input to sql-sum().
-  *                  :   Get the sql-sum for those tuples which are non null for salary fields.
++ * Description      :   sql-coll_sum() aggregate function must return the numeric coll_sql-sum, when non null values are given as input to sql-coll_sum().
++ *                  :   Get the coll_sql-sum for those tuples which are non null for salary fields.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
 +insert into tdst
 +select element {'id':123,'sal':1000};
 +insert into tdst
 +select element {'id':113,'sal':2000};
 +insert into tdst
 +select element {'id':163,'sal':3000};
 +insert into tdst
 +select element {'id':161,'sal':4000};
 +insert into tdst
 +select element {'id':173,'sal':5000};
 +insert into tdst
 +select element {'id':183,'sal':null};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
index 6f538cc,0000000..e119cfa
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   sql-sum() aggregate function must return the numeric sql-sum, when non null values are given as input to sql-sum().
-  *                  :   Get the sql-sum for those tuples which are non null for salary fields.
++ * Description      :   sql-coll_sum() aggregate function must return the numeric coll_sql-sum, when non null values are given as input to sql-coll_sum().
++ *                  :   Get the coll_sql-sum for those tuples which are non null for salary fields.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
 +set "import-private-functions" "true";
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element l.sal
 +    from  tdst as l
 +    where test.not(test."is-null"(l.sal))
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
index 942e2a0,0000000..e01b778
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   Add numeric values with a null value, sql-sum() aggregate function must return null.
++ * Description      :   Add numeric values with a null value, sql-coll_sum() aggregate function must return null.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 +{
 +  id : int64,
 +  sal : int64?
 +}
 +
 +create  table tdst(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.2.update.sqlpp
index 44cc106,0000000..00f93a4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.2.update.sqlpp
@@@ -1,39 -1,0 +1,39 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   Add numeric values with a null value, sql-sum() aggregate function must return null.
++ * Description      :   Add numeric values with a null value, sql-coll_sum() aggregate function must return null.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
 +insert into tdst
 +select element {'id':123,'sal':1000};
 +insert into tdst
 +select element {'id':113,'sal':2000};
 +insert into tdst
 +select element {'id':163,'sal':3000};
 +insert into tdst
 +select element {'id':161,'sal':4000};
 +insert into tdst
 +select element {'id':173,'sal':5000};
 +insert into tdst
 +select element {'id':183,'sal':null};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.3.query.sqlpp
index ed82deb,0000000..e603924
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/sum_numeric_null/sum_numeric_null.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   Add numeric values with a null value, sql-sum() aggregate function must return null.
++ * Description      :   Add numeric values with a null value, sql-coll_sum() aggregate function must return null.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
- select element test."sql-sum"((
++select element test."coll_sql-sum"((
 +    select element l.sal
 +    from  tdst as l
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null/agg_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null/agg_null.3.query.sqlpp
index dc3ef07,0000000..38c0972
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null/agg_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null/agg_null.3.query.sqlpp
@@@ -1,25 -1,0 +1,25 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over both ordered list and unordered list with only null items.
 +* Expected Res : Success
 +* Date         : Jun 2nd 2013
 +*/
 +
- {'count1':count([null]),'average1':avg([null]),'sum1':sum([null]),'min1':min([null]),'max1':max([null]),'count2':count({{null,null}}),'average2':avg({{null,null}}),'sum2':sum({{null,null}}),'min2':min({{null,null}}),'max2':max({{null,null}})};
++{'count1':coll_count([null]),'average1':coll_avg([null]),'sum1':coll_sum([null]),'min1':coll_min([null]),'max1':coll_max([null]),'count2':coll_count({{null,null}}),'average2':coll_avg({{null,null}}),'sum2':coll_sum({{null,null}}),'min2':coll_min({{null,null}}),'max2':coll_max({{null,null}})};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec/agg_null_rec.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec/agg_null_rec.3.query.sqlpp
index ddd882e,0000000..9ada73b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec/agg_null_rec.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec/agg_null_rec.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over records, with only null items for the aggregating fields.
 +* Expected Res : Success
 +* Date         : Jun 2nd 2013
 +*/
 +
 +use test;
 +
 +
- {'count':test.count((
++{'count':test.coll_count((
 +    select element t.valplus
 +    from  Test as t
- )),'average':test.avg((
++)),'average':test.coll_avg((
 +    select element t.valplus
 +    from  Test as t
- )),'sum':test.sum((
++)),'sum':test.coll_sum((
 +    select element t.valplus
 +    from  Test as t
- )),'min':test.min((
++)),'min':test.coll_min((
 +    select element t.valplus
 +    from  Test as t
- )),'max':test.max((
++)),'max':test.coll_max((
 +    select element t.valplus
 +    from  Test as t
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
index 6cf606d,0000000..3643901
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_null_rec_1/agg_null_rec_1.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over records, with only null items for the aggregating fields.
 +* Expected Res : Success
 +* Date         : Jun 2nd 2013
 +*/
 +
 +use test;
 +
 +
- {'count':test.count((
++{'count':test.coll_count((
 +    select element t
 +    from  Test as t
- )),'average':test.avg((
++)),'average':test.coll_avg((
 +    select element i.val
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
- )),'sum':test.sum((
++)),'sum':test.coll_sum((
 +    select element i.val
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
- )),'min':test.min((
++)),'min':test.coll_min((
 +    select element i.valplus
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
- )),'max':test.max((
++)),'max':test.coll_max((
 +    select element i.valplus
 +    from  (
 +        select element t
 +        from  Test as t
 +    ) as i
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number/agg_number.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number/agg_number.3.query.sqlpp
index 87f159f,0000000..c90641b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number/agg_number.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number/agg_number.3.query.sqlpp
@@@ -1,25 -1,0 +1,25 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over an ordered list with numbers of different types
 +* Expected Res : Success
 +* Date         : Jun 2nd 2013
 +*/
 +
- {'count1':count([float('2.0'),double('3.0'),93847382783847382,1]),'average1':avg([float('2.0'),double('3.0'),93847382783847382,1]),'sum1':sum([float('2.0'),double('3.0'),93847382783847382,1]),'min1':min([float('2.0'),double('3.0'),93847382783847382,1]),'max1':max([float('2.0'),double('3.0'),93847382783847382,1]),'count2':count({{float('2.0'),double('3.0'),93847382783847382,1}}),'average2':avg({{float('2.0'),double('3.0'),93847382783847382,1}}),'sum2':sum({{float('2.0'),double('3.0'),93847382783847382,1}}),'min2':min({{float('2.0'),double('3.0'),93847382783847382,1}}),'max2':max({{float('2.0'),double('3.0'),93847382783847382,1}})};
++{'count1':coll_count([float('2.0'),double('3.0'),93847382783847382,1]),'average1':coll_avg([float('2.0'),double('3.0'),93847382783847382,1]),'sum1':coll_sum([float('2.0'),double('3.0'),93847382783847382,1]),'min1':coll_min([float('2.0'),double('3.0'),93847382783847382,1]),'max1':coll_max([float('2.0'),double('3.0'),93847382783847382,1]),'count2':coll_count({{float('2.0'),double('3.0'),93847382783847382,1}}),'average2':coll_avg({{float('2.0'),double('3.0'),93847382783847382,1}}),'sum2':coll_sum({{float('2.0'),double('3.0'),93847382783847382,1}}),'min2':coll_min({{float('2.0'),double('3.0'),93847382783847382,1}}),'max2':coll_max({{float('2.0'),double('3.0'),93847382783847382,1}})};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number_rec/agg_number_rec.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number_rec/agg_number_rec.3.query.sqlpp
index 63ba0ad,0000000..42306d8
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number_rec/agg_number_rec.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/agg_number_rec/agg_number_rec.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run aggregates over records, with different numeric typed items for the aggregating fields.
 +* Expected Res : Success
 +* Date         : Jun 2nd 2013
 +*/
 +
 +use test;
 +
 +
- {'count':test.count((
++{'count':test.coll_count((
 +    select element t.valplus
 +    from  Test as t
- )),'average':test.avg((
++)),'average':test.coll_avg((
 +    select element t.valplus
 +    from  Test as t
- )),'sum':test.sum((
++)),'sum':test.coll_sum((
 +    select element t.valplus
 +    from  Test as t
- )),'min':test.min((
++)),'min':test.coll_min((
 +    select element t.valplus
 +    from  Test as t
- )),'max':test.max((
++)),'max':test.coll_max((
 +    select element t.valplus
 +    from  Test as t
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double/avg_double.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double/avg_double.3.query.sqlpp
index 12a1cfb,0000000..ae1b1eb
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double/avg_double.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double/avg_double.3.query.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- select element avg((
++select element coll_avg((
 +    select element x
 +    from  [1.0,2.0,double('3.0')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double_null/avg_double_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double_null/avg_double_null.3.query.sqlpp
index 2926232,0000000..4c10066
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double_null/avg_double_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_double_null/avg_double_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test.avg((
++{'average':test.coll_avg((
 +    select element x.doubleField
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_01/avg_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_01/avg_empty_01.3.query.sqlpp
index 10aa600,0000000..d7a5263
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_01/avg_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_01/avg_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that avg aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.avg((
++select element test.coll_avg((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_02/avg_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_02/avg_empty_02.3.query.sqlpp
index 7b6a34a,0000000..8d2359f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_02/avg_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_empty_02/avg_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description    : Tests that avg aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test.avg((
++select element test.coll_avg((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float/avg_float.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float/avg_float.3.query.sqlpp
index 264ec2c,0000000..14c4bc5
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float/avg_float.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float/avg_float.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.avg((
++select element test.coll_avg((
 +    select element x
 +    from  [test.float('1'),test.float('2'),test.float('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float_null/avg_float_nu.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float_null/avg_float_nu.3.query.sqlpp
index bbc0d48,0000000..c8c3774
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float_null/avg_float_nu.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_float_null/avg_float_nu.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test.avg((
++{'average':test.coll_avg((
 +    select element x.floatField
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16/avg_int16.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16/avg_int16.3.query.sqlpp
index d370467,0000000..a755219
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16/avg_int16.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16/avg_int16.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.avg((
++select element test.coll_avg((
 +    select element x
 +    from  [test.int16('1'),test.int16('2'),test.int16('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16_null/avg_int16_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16_null/avg_int16_null.3.query.sqlpp
index 2587a9d,0000000..d50b0d1
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16_null/avg_int16_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int16_null/avg_int16_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test.avg((
++{'average':test.coll_avg((
 +    select element x.int16Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32/avg_int32.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32/avg_int32.3.query.sqlpp
index 769f3d0,0000000..a235c84
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32/avg_int32.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32/avg_int32.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.avg((
++select element test.coll_avg((
 +    select element x
 +    from  [1,2,3] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32_null/avg_int32_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32_null/avg_int32_null.3.query.sqlpp
index acb814e,0000000..9618d2d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32_null/avg_int32_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int32_null/avg_int32_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test.avg((
++{'average':test.coll_avg((
 +    select element x.int32Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64/avg_int64.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64/avg_int64.3.query.sqlpp
index 1734f7f,0000000..f090f5b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64/avg_int64.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/avg_int64/avg_int64.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.avg((
++select element test.coll_avg((
 +    select element x
 +    from  [test.int64('1'),test.int64('2'),test.int64('3')] as x
 +));



[23/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827-2/query-issue827-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827-2/query-issue827-2.3.query.sqlpp
index 562f78f,0000000..aefe8a3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827-2/query-issue827-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827-2/query-issue827-2.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue827
 + * https://code.google.com/p/asterixdb/issues/detail?id=827
 + * Expected Res : SUCCESS
 + * Date         : 3rd Dec. 2014
 + */
 +
 +use tpch;
 +
 +
- {'sum_qty_partial':tpch.sum((
++{'sum_qty_partial':tpch.coll_sum((
 +    select element i.l_quantity
 +    from  LineItem as i
 +    where (i.l_shipdate <= '1998-09-02')
- )),'sum_base_price':tpch.sum((
++)),'sum_base_price':tpch.coll_sum((
 +    select element i.l_extendedprice
 +    from  LineItem as i
- )),'sum_disc_price':tpch.sum((
++)),'sum_disc_price':tpch.coll_sum((
 +    select element (i.l_extendedprice * (1 - i.l_discount))
 +    from  LineItem as i
- )),'sum_charge':tpch.sum((
++)),'sum_charge':tpch.coll_sum((
 +    select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +    from  LineItem as i
- )),'ave_qty':tpch.avg((
++)),'ave_qty':tpch.coll_avg((
 +    select element i.l_quantity
 +    from  LineItem as i
 +    where (i.l_shipdate <= '1998-09-02')
- )),'ave_price':tpch.avg((
++)),'ave_price':tpch.coll_avg((
 +    select element i.l_extendedprice
 +    from  LineItem as i
- )),'ave_disc':tpch.avg((
++)),'ave_disc':tpch.coll_avg((
 +    select element i.l_discount
 +    from  LineItem as i
- )),'count_order':tpch.count((
++)),'count_order':coll_count((
 +    select element l
 +    from  LineItem as l
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827/query-issue827.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827/query-issue827.3.query.sqlpp
index d056bcb,0000000..e735107
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827/query-issue827.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/query-issue827/query-issue827.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue827
 + * https://code.google.com/p/asterixdb/issues/detail?id=827
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- {'count_cheaps':tpch.count((
++{'count_cheaps':coll_count((
 +    select element l.l_quantity
 +    from  LineItem as l
- )),'count_expensives':tpch.sum((
++)),'count_expensives':tpch.coll_sum((
 +    select element e
 +    from  (
 +        select element l.l_extendedprice
 +        from  LineItem as l
 +    ) as e
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.2.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.2.query.sqlpp
index 420933a,0000000..fad2f76
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.2.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.2.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue 489
 +               https://code.google.com/p/asterixdb/issues/detail?id=489
 + * Expected Res : Success
 + * Date         : 31st May 2013
 + */
 +
- select element count((
++select element coll_count((
 +    select element x
 +    from  "Metadata.Function" as x
 +    where (x.DataverseName = 'test')
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.4.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.4.query.sqlpp
index 420933a,0000000..fad2f76
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.4.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/user-defined-functions/query-issue489/query-issue489.4.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue 489
 +               https://code.google.com/p/asterixdb/issues/detail?id=489
 + * Expected Res : Success
 + * Date         : 31st May 2013
 + */
 +
- select element count((
++select element coll_count((
 +    select element x
 +    from  "Metadata.Function" as x
 +    where (x.DataverseName = 'test')
 +));


[17/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
index b102e0b,0000000..e23837b
mode 100644,000000..100644
--- a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
+++ b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
@@@ -1,798 -1,0 +1,802 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.test.aql;
 +
 +import java.io.BufferedInputStream;
 +import java.io.BufferedReader;
 +import java.io.File;
 +import java.io.FileInputStream;
 +import java.io.FileOutputStream;
 +import java.io.FileReader;
 +import java.io.InputStream;
 +import java.io.InputStreamReader;
 +import java.io.PrintWriter;
 +import java.io.StringWriter;
 +import java.lang.reflect.InvocationTargetException;
 +import java.lang.reflect.Method;
 +import java.nio.charset.StandardCharsets;
 +import java.util.Arrays;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Set;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.common.config.GlobalConfig;
 +import org.apache.asterix.common.utils.ServletUtil.Servlets;
 +import org.apache.asterix.test.server.ITestServer;
 +import org.apache.asterix.test.server.TestServerProvider;
 +import org.apache.asterix.testframework.context.TestCaseContext;
 +import org.apache.asterix.testframework.context.TestCaseContext.OutputFormat;
 +import org.apache.asterix.testframework.context.TestFileContext;
 +import org.apache.asterix.testframework.xml.TestCase.CompilationUnit;
 +import org.apache.asterix.testframework.xml.TestGroup;
 +import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
 +import org.apache.commons.httpclient.HttpClient;
 +import org.apache.commons.httpclient.HttpMethod;
 +import org.apache.commons.httpclient.HttpMethodBase;
 +import org.apache.commons.httpclient.HttpStatus;
 +import org.apache.commons.httpclient.NameValuePair;
 +import org.apache.commons.httpclient.methods.GetMethod;
 +import org.apache.commons.httpclient.methods.PostMethod;
 +import org.apache.commons.httpclient.methods.StringRequestEntity;
 +import org.apache.commons.httpclient.params.HttpMethodParams;
 +import org.apache.commons.io.IOUtils;
++import org.apache.commons.lang3.mutable.MutableInt;
 +import org.json.JSONObject;
 +
 +public class TestExecutor {
 +
 +    /*
 +     * Static variables
 +     */
 +    protected static final Logger LOGGER = Logger.getLogger(TestExecutor.class.getName());
 +    // see
 +    // https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers/417184
 +    private static final long MAX_URL_LENGTH = 2000l;
 +    private static Method managixExecuteMethod = null;
 +    private static final HashMap<Integer, ITestServer> runningTestServers = new HashMap<>();
 +
 +    /*
 +     * Instance members
 +     */
 +    private String host;
 +    private int port;
 +    private ITestLibrarian librarian;
 +
 +    public TestExecutor() {
 +        host = "127.0.0.1";
 +        port = 19002;
 +    }
 +
 +    public TestExecutor(String host, int port) {
 +        this.host = host;
 +        this.port = port;
 +    }
 +
 +    public void setLibrarian(ITestLibrarian librarian) {
 +        this.librarian = librarian;
 +    }
 +
 +    /**
 +     * Probably does not work well with symlinks.
 +     */
 +    public boolean deleteRec(File path) {
 +        if (path.isDirectory()) {
 +            for (File f : path.listFiles()) {
 +                if (!deleteRec(f)) {
 +                    return false;
 +                }
 +            }
 +        }
 +        return path.delete();
 +    }
 +
 +    public void runScriptAndCompareWithResult(File scriptFile, PrintWriter print, File expectedFile, File actualFile)
 +            throws Exception {
 +        System.err.println("Expected results file: " + expectedFile.toString());
 +        BufferedReader readerExpected = new BufferedReader(
 +                new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
 +        BufferedReader readerActual = new BufferedReader(
 +                new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
 +        String lineExpected, lineActual;
 +        int num = 1;
 +        try {
 +            while ((lineExpected = readerExpected.readLine()) != null) {
 +                lineActual = readerActual.readLine();
 +                // Assert.assertEquals(lineExpected, lineActual);
 +                if (lineActual == null) {
 +                    if (lineExpected.isEmpty()) {
 +                        continue;
 +                    }
 +                    throw new Exception(
 +                            "Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected + "\n> ");
 +                }
 +
 +                // Comparing result equality but ignore "Time"-prefixed fields. (for metadata tests.)
 +                String[] lineSplitsExpected = lineExpected.split("Time");
 +                String[] lineSplitsActual = lineActual.split("Time");
 +                if (lineSplitsExpected.length != lineSplitsActual.length) {
 +                    throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected
 +                            + "\n> " + lineActual);
 +                }
 +                if (!equalStrings(lineSplitsExpected[0], lineSplitsActual[0])) {
 +                    throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< " + lineExpected
 +                            + "\n> " + lineActual);
 +                }
 +
 +                for (int i = 1; i < lineSplitsExpected.length; i++) {
 +                    String[] splitsByCommaExpected = lineSplitsExpected[i].split(",");
 +                    String[] splitsByCommaActual = lineSplitsActual[i].split(",");
 +                    if (splitsByCommaExpected.length != splitsByCommaActual.length) {
 +                        throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< "
 +                                + lineExpected + "\n> " + lineActual);
 +                    }
 +                    for (int j = 1; j < splitsByCommaExpected.length; j++) {
 +                        if (splitsByCommaExpected[j].indexOf("DatasetId") >= 0) {
 +                            // Ignore the field "DatasetId", which is different for different runs.
 +                            // (for metadata tests)
 +                            continue;
 +                        }
 +                        if (!equalStrings(splitsByCommaExpected[j], splitsByCommaActual[j])) {
 +                            throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< "
 +                                    + lineExpected + "\n> " + lineActual);
 +                        }
 +                    }
 +                }
 +
 +                ++num;
 +            }
 +            lineActual = readerActual.readLine();
 +            if (lineActual != null) {
 +                throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< \n> " + lineActual);
 +            }
 +        } finally {
 +            readerExpected.close();
 +            readerActual.close();
 +        }
 +
 +    }
 +
 +    private boolean equalStrings(String s1, String s2) {
 +        String[] rowsOne = s1.split("\n");
 +        String[] rowsTwo = s2.split("\n");
 +
 +        for (int i = 0; i < rowsOne.length; i++) {
 +            String row1 = rowsOne[i];
 +            String row2 = rowsTwo[i];
 +
 +            if (row1.equals(row2)) {
 +                continue;
 +            }
 +
 +            String[] fields1 = row1.split(" ");
 +            String[] fields2 = row2.split(" ");
 +
 +            boolean bagEncountered = false;
 +            Set<String> bagElements1 = new HashSet<String>();
 +            Set<String> bagElements2 = new HashSet<String>();
 +
 +            for (int j = 0; j < fields1.length; j++) {
 +                if (j >= fields2.length) {
 +                    return false;
 +                } else if (fields1[j].equals(fields2[j])) {
 +                    bagEncountered = fields1[j].equals("{{");
 +                    if (fields1[j].startsWith("}}")) {
 +                        if (!bagElements1.equals(bagElements2)) {
 +                            return false;
 +                        }
 +                        bagEncountered = false;
 +                        bagElements1.clear();
 +                        bagElements2.clear();
 +                    }
 +                    continue;
 +                } else if (fields1[j].indexOf('.') < 0) {
 +                    if (bagEncountered) {
 +                        bagElements1.add(fields1[j].replaceAll(",$", ""));
 +                        bagElements2.add(fields2[j].replaceAll(",$", ""));
 +                        continue;
 +                    }
 +                    return false;
 +                } else {
 +                    // If the fields are floating-point numbers, test them
 +                    // for equality safely
 +                    fields1[j] = fields1[j].split(",")[0];
 +                    fields2[j] = fields2[j].split(",")[0];
 +                    try {
 +                        Double double1 = Double.parseDouble(fields1[j]);
 +                        Double double2 = Double.parseDouble(fields2[j]);
 +                        float float1 = (float) double1.doubleValue();
 +                        float float2 = (float) double2.doubleValue();
 +
 +                        if (Math.abs(float1 - float2) == 0) {
 +                            continue;
 +                        } else {
 +                            return false;
 +                        }
 +                    } catch (NumberFormatException ignored) {
 +                        // Guess they weren't numbers - must simply not be equal
 +                        return false;
 +                    }
 +                }
 +            }
 +        }
 +        return true;
 +    }
 +
 +    // For tests where you simply want the byte-for-byte output.
 +    private static void writeOutputToFile(File actualFile, InputStream resultStream) throws Exception {
 +        try (FileOutputStream out = new FileOutputStream(actualFile)) {
 +            IOUtils.copy(resultStream, out);
 +        }
 +    }
 +
 +    private int executeHttpMethod(HttpMethod method) throws Exception {
 +        HttpClient client = new HttpClient();
 +        int statusCode;
 +        try {
 +            statusCode = client.executeMethod(method);
 +        } catch (Exception e) {
 +            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
 +            e.printStackTrace();
 +            throw e;
 +        }
 +        if (statusCode != HttpStatus.SC_OK) {
 +            // QQQ For now, we are indeed assuming we get back JSON errors.
 +            // In future this may be changed depending on the requested
 +            // output format sent to the servlet.
 +            String errorBody = method.getResponseBodyAsString();
 +            JSONObject result = new JSONObject(errorBody);
 +            String[] errors = { result.getJSONArray("error-code").getString(0), result.getString("summary"),
 +                    result.getString("stacktrace") };
 +            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errors[2]);
 +            String exceptionMsg = "HTTP operation failed: " + errors[0] + "\nSTATUS LINE: " + method.getStatusLine()
 +                    + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: " + errors[2];
 +            throw new Exception(exceptionMsg);
 +        }
 +        return statusCode;
 +    }
 +
 +    // Executes Query and returns results as JSONArray
 +    public InputStream executeQuery(String str, OutputFormat fmt, String url, List<CompilationUnit.Parameter> params)
 +            throws Exception {
 +        HttpMethodBase method = null;
 +        if (str.length() + url.length() < MAX_URL_LENGTH) {
 +            // Use GET for small-ish queries
 +            method = new GetMethod(url);
 +            NameValuePair[] parameters = new NameValuePair[params.size() + 1];
 +            parameters[0] = new NameValuePair("query", str);
 +            int i = 1;
 +            for (CompilationUnit.Parameter param : params) {
 +                parameters[i++] = new NameValuePair(param.getName(), param.getValue());
 +            }
 +            method.setQueryString(parameters);
 +        } else {
 +            // Use POST for bigger ones to avoid 413 FULL_HEAD
 +            // QQQ POST API doesn't allow encoding additional parameters
 +            method = new PostMethod(url);
 +            ((PostMethod) method).setRequestEntity(new StringRequestEntity(str));
 +        }
 +
 +        // Set accepted output response type
 +        method.setRequestHeader("Accept", fmt.mimeType());
 +        // Provide custom retry handler is necessary
 +        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 +        executeHttpMethod(method);
 +        return method.getResponseBodyAsStream();
 +    }
 +
 +    public InputStream executeClusterStateQuery(OutputFormat fmt, String url) throws Exception {
 +        HttpMethodBase method = new GetMethod(url);
 +
 +        // Set accepted output response type
 +        method.setRequestHeader("Accept", fmt.mimeType());
 +        // Provide custom retry handler is necessary
 +        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 +        executeHttpMethod(method);
 +        return method.getResponseBodyAsStream();
 +    }
 +
 +    // To execute Update statements
 +    // Insert and Delete statements are executed here
 +    public void executeUpdate(String str, String url) throws Exception {
 +        // Create a method instance.
 +        PostMethod method = new PostMethod(url);
 +        method.setRequestEntity(new StringRequestEntity(str));
 +
 +        // Provide custom retry handler is necessary
 +        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 +
 +        // Execute the method.
 +        executeHttpMethod(method);
 +    }
 +
 +    // Executes AQL in either async or async-defer mode.
 +    public InputStream executeAnyAQLAsync(String str, boolean defer, OutputFormat fmt, String url) throws Exception {
 +        // Create a method instance.
 +        PostMethod method = new PostMethod(url);
 +        if (defer) {
 +            method.setQueryString(new NameValuePair[] { new NameValuePair("mode", "asynchronous-deferred") });
 +        } else {
 +            method.setQueryString(new NameValuePair[] { new NameValuePair("mode", "asynchronous") });
 +        }
 +        method.setRequestEntity(new StringRequestEntity(str));
 +        method.setRequestHeader("Accept", fmt.mimeType());
 +
 +        // Provide custom retry handler is necessary
 +        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 +        executeHttpMethod(method);
 +        InputStream resultStream = method.getResponseBodyAsStream();
 +
 +        String theHandle = IOUtils.toString(resultStream, "UTF-8");
 +
 +        // take the handle and parse it so results can be retrieved
 +        InputStream handleResult = getHandleResult(theHandle, fmt);
 +        return handleResult;
 +    }
 +
 +    private InputStream getHandleResult(String handle, OutputFormat fmt) throws Exception {
 +        final String url = "http://" + host + ":" + port + Servlets.QUERY_RESULT.getPath();
 +
 +        // Create a method instance.
 +        GetMethod method = new GetMethod(url);
 +        method.setQueryString(new NameValuePair[] { new NameValuePair("handle", handle) });
 +        method.setRequestHeader("Accept", fmt.mimeType());
 +
 +        // Provide custom retry handler is necessary
 +        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 +
 +        executeHttpMethod(method);
 +        return method.getResponseBodyAsStream();
 +    }
 +
 +    // To execute DDL and Update statements
 +    // create type statement
 +    // create dataset statement
 +    // create index statement
 +    // create dataverse statement
 +    // create function statement
 +    public void executeDDL(String str, String url) throws Exception {
 +        // Create a method instance.
 +        PostMethod method = new PostMethod(url);
 +        method.setRequestEntity(new StringRequestEntity(str));
 +        // Provide custom retry handler is necessary
 +        method.getParams().setParameter(HttpMethodParams.RETRY_HANDLER, new DefaultHttpMethodRetryHandler(3, false));
 +
 +        // Execute the method.
 +        executeHttpMethod(method);
 +    }
 +
 +    // Method that reads a DDL/Update/Query File
 +    // and returns the contents as a string
 +    // This string is later passed to REST API for execution.
 +    public String readTestFile(File testFile) throws Exception {
 +        BufferedReader reader = new BufferedReader(new FileReader(testFile));
 +        String line = null;
 +        StringBuilder stringBuilder = new StringBuilder();
 +        String ls = System.getProperty("line.separator");
 +        while ((line = reader.readLine()) != null) {
 +            stringBuilder.append(line);
 +            stringBuilder.append(ls);
 +        }
 +        reader.close();
 +        return stringBuilder.toString();
 +    }
 +
 +    public static void executeManagixCommand(String command) throws ClassNotFoundException, NoSuchMethodException,
 +            SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
 +        if (managixExecuteMethod == null) {
 +            Class<?> clazz = Class.forName("org.apache.asterix.installer.test.AsterixInstallerIntegrationUtil");
 +            managixExecuteMethod = clazz.getMethod("executeCommand", String.class);
 +        }
 +        managixExecuteMethod.invoke(null, command);
 +    }
 +
 +    public static String executeScript(ProcessBuilder pb, String scriptPath) throws Exception {
 +        pb.command(scriptPath);
 +        Process p = pb.start();
 +        p.waitFor();
 +        return getProcessOutput(p);
 +    }
 +
 +    private static String executeVagrantScript(ProcessBuilder pb, String node, String scriptName) throws Exception {
 +        pb.command("vagrant", "ssh", node, "--", pb.environment().get("SCRIPT_HOME") + scriptName);
 +        Process p = pb.start();
 +        p.waitFor();
 +        InputStream input = p.getInputStream();
 +        return IOUtils.toString(input, StandardCharsets.UTF_8.name());
 +    }
 +
 +    private static String executeVagrantManagix(ProcessBuilder pb, String command) throws Exception {
 +        pb.command("vagrant", "ssh", "cc", "--", pb.environment().get("MANAGIX_HOME") + command);
 +        Process p = pb.start();
 +        p.waitFor();
 +        InputStream input = p.getInputStream();
 +        return IOUtils.toString(input, StandardCharsets.UTF_8.name());
 +    }
 +
 +    private static String getScriptPath(String queryPath, String scriptBasePath, String scriptFileName) {
 +        String targetWord = "queries" + File.separator;
 +        int targetWordSize = targetWord.lastIndexOf(File.separator);
 +        int beginIndex = queryPath.lastIndexOf(targetWord) + targetWordSize;
 +        int endIndex = queryPath.lastIndexOf(File.separator);
 +        String prefix = queryPath.substring(beginIndex, endIndex);
 +        String scriptPath = scriptBasePath + prefix + File.separator + scriptFileName;
 +        return scriptPath;
 +    }
 +
 +    private static String getProcessOutput(Process p) throws Exception {
 +        StringBuilder s = new StringBuilder();
 +        BufferedInputStream bisIn = new BufferedInputStream(p.getInputStream());
 +        StringWriter writerIn = new StringWriter();
 +        IOUtils.copy(bisIn, writerIn, "UTF-8");
 +        s.append(writerIn.toString());
 +
 +        BufferedInputStream bisErr = new BufferedInputStream(p.getErrorStream());
 +        StringWriter writerErr = new StringWriter();
 +        IOUtils.copy(bisErr, writerErr, "UTF-8");
 +        s.append(writerErr.toString());
 +        if (writerErr.toString().length() > 0) {
 +            StringBuilder sbErr = new StringBuilder();
 +            sbErr.append("script execution failed - error message:\n");
 +            sbErr.append("-------------------------------------------\n");
 +            sbErr.append(s.toString());
 +            sbErr.append("-------------------------------------------\n");
 +            LOGGER.info(sbErr.toString().trim());
 +            throw new Exception(s.toString().trim());
 +        }
 +        return s.toString();
 +    }
 +
    /**
     * Convenience overload that runs the test case without tracking a failed
     * test group: delegates to the five-argument
     * {@code executeTest(actualPath, testCaseCtx, pb, isDmlRecoveryTest, failedGroup)}
     * with a {@code null} group.
     *
     * @param actualPath        directory where actual result files are written
     * @param testCaseCtx       context of the test case to execute
     * @param pb                process builder used for any external scripts
     * @param isDmlRecoveryTest whether crash/recovery steps are injected
     * @throws Exception if the delegated execution fails
     */
    public void executeTest(String actualPath, TestCaseContext testCaseCtx, ProcessBuilder pb,
            boolean isDmlRecoveryTest) throws Exception {
        executeTest(actualPath, testCaseCtx, pb, isDmlRecoveryTest, null);
    }
 +
++    public void executeTest(TestCaseContext testCaseCtx, TestFileContext ctx, String statement,
++            boolean isDmlRecoveryTest, ProcessBuilder pb, CompilationUnit cUnit, MutableInt queryCount,
++            List<TestFileContext> expectedResultFileCtxs, File testFile, String actualPath) throws Exception {
++        File qbcFile;
++        boolean failed = false;
++        File expectedResultFile;
++        switch (ctx.getType()) {
++            case "ddl":
++                if (ctx.getFile().getName().endsWith("aql")) {
++                    executeDDL(statement, "http://" + host + ":" + port + Servlets.AQL_DDL.getPath());
++                } else {
++                    executeDDL(statement, "http://" + host + ":" + port + Servlets.SQLPP_DDL.getPath());
++                }
++                break;
++            case "update":
++                // isDmlRecoveryTest: set IP address
++                if (isDmlRecoveryTest && statement.contains("nc1://")) {
++                    statement = statement.replaceAll("nc1://", "127.0.0.1://../../../../../../asterix-app/");
++                }
++                if (ctx.getFile().getName().endsWith("aql")) {
++                    executeUpdate(statement, "http://" + host + ":" + port + Servlets.AQL_UPDATE.getPath());
++                } else {
++                    executeUpdate(statement, "http://" + host + ":" + port + Servlets.SQLPP_UPDATE.getPath());
++                }
++                break;
++            case "query":
++            case "async":
++            case "asyncdefer":
++                // isDmlRecoveryTest: insert Crash and Recovery
++                if (isDmlRecoveryTest) {
++                    executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator + "dml_recovery"
++                            + File.separator + "kill_cc_and_nc.sh");
++                    executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator + "dml_recovery"
++                            + File.separator + "stop_and_start.sh");
++                }
++                InputStream resultStream = null;
++                OutputFormat fmt = OutputFormat.forCompilationUnit(cUnit);
++                if (ctx.getFile().getName().endsWith("aql")) {
++                    if (ctx.getType().equalsIgnoreCase("query")) {
++                        resultStream = executeQuery(statement, fmt,
++                                "http://" + host + ":" + port + Servlets.AQL_QUERY.getPath(), cUnit.getParameter());
++                    } else if (ctx.getType().equalsIgnoreCase("async")) {
++                        resultStream = executeAnyAQLAsync(statement, false, fmt,
++                                "http://" + host + ":" + port + Servlets.AQL.getPath());
++                    } else if (ctx.getType().equalsIgnoreCase("asyncdefer")) {
++                        resultStream = executeAnyAQLAsync(statement, true, fmt,
++                                "http://" + host + ":" + port + Servlets.AQL.getPath());
++                    }
++                } else {
++                    if (ctx.getType().equalsIgnoreCase("query")) {
++                        resultStream = executeQuery(statement, fmt,
++                                "http://" + host + ":" + port + Servlets.SQLPP_QUERY.getPath(), cUnit.getParameter());
++                    } else if (ctx.getType().equalsIgnoreCase("async")) {
++                        resultStream = executeAnyAQLAsync(statement, false, fmt,
++                                "http://" + host + ":" + port + Servlets.SQLPP.getPath());
++                    } else if (ctx.getType().equalsIgnoreCase("asyncdefer")) {
++                        resultStream = executeAnyAQLAsync(statement, true, fmt,
++                                "http://" + host + ":" + port + Servlets.SQLPP.getPath());
++                    }
++                }
++
++                if (queryCount.intValue() >= expectedResultFileCtxs.size()) {
++                    throw new IllegalStateException("no result file for " + testFile.toString() + "; queryCount: "
++                            + queryCount + ", filectxs.size: " + expectedResultFileCtxs.size());
++                }
++                expectedResultFile = expectedResultFileCtxs.get(queryCount.intValue()).getFile();
++
++                File actualResultFile = testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
++                actualResultFile.getParentFile().mkdirs();
++                writeOutputToFile(actualResultFile, resultStream);
++
++                runScriptAndCompareWithResult(testFile, new PrintWriter(System.err), expectedResultFile,
++                        actualResultFile);
++                queryCount.increment();
++                break;
++            case "mgx":
++                executeManagixCommand(statement);
++                break;
++            case "txnqbc": // qbc represents query before crash
++                resultStream = executeQuery(statement, OutputFormat.forCompilationUnit(cUnit),
++                        "http://" + host + ":" + port + Servlets.AQL_QUERY.getPath(), cUnit.getParameter());
++                qbcFile = getTestCaseQueryBeforeCrashFile(actualPath, testCaseCtx, cUnit);
++                qbcFile.getParentFile().mkdirs();
++                writeOutputToFile(qbcFile, resultStream);
++                break;
++            case "txnqar": // qar represents query after recovery
++                resultStream = executeQuery(statement, OutputFormat.forCompilationUnit(cUnit),
++                        "http://" + host + ":" + port + Servlets.AQL_QUERY.getPath(), cUnit.getParameter());
++                File qarFile = new File(actualPath + File.separator
++                        + testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_" + cUnit.getName()
++                        + "_qar.adm");
++                qarFile.getParentFile().mkdirs();
++                writeOutputToFile(qarFile, resultStream);
++                qbcFile = getTestCaseQueryBeforeCrashFile(actualPath, testCaseCtx, cUnit);
++                runScriptAndCompareWithResult(testFile, new PrintWriter(System.err), qbcFile, qarFile);
++                break;
++            case "txneu": // eu represents erroneous update
++                try {
++                    executeUpdate(statement, "http://" + host + ":" + port + Servlets.AQL_UPDATE.getPath());
++                } catch (Exception e) {
++                    // An exception is expected.
++                    failed = true;
++                    e.printStackTrace();
++                }
++                if (!failed) {
++                    throw new Exception("Test \"" + testFile + "\" FAILED!\n  An exception" + "is expected.");
++                }
++                System.err.println("...but that was expected.");
++                break;
++            case "script":
++                try {
++                    String output = executeScript(pb, getScriptPath(testFile.getAbsolutePath(),
++                            pb.environment().get("SCRIPT_HOME"), statement.trim()));
++                    if (output.contains("ERROR")) {
++                        throw new Exception(output);
++                    }
++                } catch (Exception e) {
++                    throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
++                }
++                break;
++            case "sleep":
++                String[] lines = statement.split("\n");
++                Thread.sleep(Long.parseLong(lines[lines.length - 1].trim()));
++                break;
++            case "errddl": // a ddlquery that expects error
++                try {
++                    executeDDL(statement, "http://" + host + ":" + port + Servlets.AQL_DDL.getPath());
++                } catch (Exception e) {
++                    // expected error happens
++                    failed = true;
++                    e.printStackTrace();
++                }
++                if (!failed) {
++                    throw new Exception("Test \"" + testFile + "\" FAILED!\n  An exception is expected.");
++                }
++                System.err.println("...but that was expected.");
++                break;
++            case "vscript": // a script that will be executed on a vagrant virtual node
++                try {
++                    String[] command = statement.trim().split(" ");
++                    if (command.length != 2) {
++                        throw new Exception("invalid vagrant script format");
++                    }
++                    String nodeId = command[0];
++                    String scriptName = command[1];
++                    String output = executeVagrantScript(pb, nodeId, scriptName);
++                    if (output.contains("ERROR")) {
++                        throw new Exception(output);
++                    }
++                } catch (Exception e) {
++                    throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
++                }
++                break;
++            case "vmgx": // a managix command that will be executed on vagrant cc node
++                try {
++                    String output = executeVagrantManagix(pb, statement);
++                    if (output.contains("ERROR")) {
++                        throw new Exception(output);
++                    }
++                } catch (Exception e) {
++                    throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
++                }
++                break;
++            case "cstate": // cluster state query
++                try {
++                    fmt = OutputFormat.forCompilationUnit(cUnit);
++                    resultStream = executeClusterStateQuery(fmt,
++                            "http://" + host + ":" + port + Servlets.CLUSTER_STATE.getPath());
++                    expectedResultFile = expectedResultFileCtxs.get(queryCount.intValue()).getFile();
++                    actualResultFile = testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
++                    actualResultFile.getParentFile().mkdirs();
++                    writeOutputToFile(actualResultFile, resultStream);
++                    runScriptAndCompareWithResult(testFile, new PrintWriter(System.err), expectedResultFile,
++                            actualResultFile);
++                    queryCount.increment();
++                } catch (Exception e) {
++                    throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
++                }
++                break;
++            case "server": // (start <test server name> <port>
++                           // [<arg1>][<arg2>][<arg3>]...|stop (<port>|all))
++                try {
++                    lines = statement.trim().split("\n");
++                    String[] command = lines[lines.length - 1].trim().split(" ");
++                    if (command.length < 2) {
++                        throw new Exception("invalid server command format. expected format ="
++                                + " (start <test server name> <port> [<arg1>][<arg2>][<arg3>]"
++                                + "...|stop (<port>|all))");
++                    }
++                    String action = command[0];
++                    if (action.equals("start")) {
++                        if (command.length < 3) {
++                            throw new Exception("invalid server start command. expected format ="
++                                    + " (start <test server name> <port> [<arg1>][<arg2>][<arg3>]...");
++                        }
++                        String name = command[1];
++                        Integer port = new Integer(command[2]);
++                        if (runningTestServers.containsKey(port)) {
++                            throw new Exception("server with port " + port + " is already running");
++                        }
++                        ITestServer server = TestServerProvider.createTestServer(name, port);
++                        server.configure(Arrays.copyOfRange(command, 3, command.length));
++                        server.start();
++                        runningTestServers.put(port, server);
++                    } else if (action.equals("stop")) {
++                        String target = command[1];
++                        if (target.equals("all")) {
++                            for (ITestServer server : runningTestServers.values()) {
++                                server.stop();
++                            }
++                            runningTestServers.clear();
++                        } else {
++                            Integer port = new Integer(command[1]);
++                            ITestServer server = runningTestServers.get(port);
++                            if (server == null) {
++                                throw new Exception("no server is listening to port " + port);
++                            }
++                            server.stop();
++                            runningTestServers.remove(port);
++                        }
++                    } else {
++                        throw new Exception("unknown server action");
++                    }
++                } catch (Exception e) {
++                    throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
++                }
++                break;
++            case "lib": // expected format <dataverse-name> <library-name>
++                        // <library-directory>
++                        // TODO: make this case work well with entity names containing spaces by
++                        // looking for \"
++                lines = statement.split("\n");
++                String lastLine = lines[lines.length - 1];
++                String[] command = lastLine.trim().split(" ");
++                if (command.length < 3) {
++                    throw new Exception("invalid library format");
++                }
++                String dataverse = command[1];
++                String library = command[2];
++                switch (command[0]) {
++                    case "install":
++                        if (command.length != 4) {
++                            throw new Exception("invalid library format");
++                        }
++                        String libPath = command[3];
++                        librarian.install(dataverse, library, libPath);
++                        break;
++                    case "uninstall":
++                        if (command.length != 3) {
++                            throw new Exception("invalid library format");
++                        }
++                        librarian.uninstall(dataverse, library);
++                        break;
++                    default:
++                        throw new Exception("invalid library format");
++                }
++                break;
++            default:
++                throw new IllegalArgumentException("No statements of type " + ctx.getType());
++        }
++    }
++
 +    public void executeTest(String actualPath, TestCaseContext testCaseCtx, ProcessBuilder pb,
 +            boolean isDmlRecoveryTest, TestGroup failedGroup) throws Exception {
- 
 +        File testFile;
-         File expectedResultFile;
 +        String statement;
 +        List<TestFileContext> expectedResultFileCtxs;
 +        List<TestFileContext> testFileCtxs;
-         File qbcFile = null;
-         File qarFile = null;
-         int queryCount = 0;
++        MutableInt queryCount = new MutableInt(0);
 +        int numOfErrors = 0;
 +        int numOfFiles = 0;
- 
 +        List<CompilationUnit> cUnits = testCaseCtx.getTestCase().getCompilationUnit();
 +        for (CompilationUnit cUnit : cUnits) {
 +            LOGGER.info(
 +                    "Starting [TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName() + " ... ");
 +            testFileCtxs = testCaseCtx.getTestFiles(cUnit);
 +            expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
 +            for (TestFileContext ctx : testFileCtxs) {
 +                numOfFiles++;
 +                testFile = ctx.getFile();
 +                statement = readTestFile(testFile);
-                 boolean failed = false;
 +                try {
-                     switch (ctx.getType()) {
-                         case "ddl":
-                             if (ctx.getFile().getName().endsWith("aql")) {
-                                 executeDDL(statement, "http://" + host + ":" + port + Servlets.AQL_DDL.getPath());
-                             } else {
-                                 executeDDL(statement, "http://" + host + ":" + port + Servlets.SQLPP_DDL.getPath());
-                             }
-                             break;
-                         case "update":
-                             // isDmlRecoveryTest: set IP address
-                             if (isDmlRecoveryTest && statement.contains("nc1://")) {
-                                 statement = statement.replaceAll("nc1://",
-                                         "127.0.0.1://../../../../../../asterix-app/");
-                             }
-                             if (ctx.getFile().getName().endsWith("aql")) {
-                                 executeUpdate(statement, "http://" + host + ":" + port + Servlets.AQL_UPDATE.getPath());
-                             } else {
-                                 executeUpdate(statement,
-                                         "http://" + host + ":" + port + Servlets.SQLPP_UPDATE.getPath());
-                             }
-                             break;
-                         case "query":
-                         case "async":
-                         case "asyncdefer":
-                             // isDmlRecoveryTest: insert Crash and Recovery
-                             if (isDmlRecoveryTest) {
-                                 executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator + "dml_recovery"
-                                         + File.separator + "kill_cc_and_nc.sh");
-                                 executeScript(pb, pb.environment().get("SCRIPT_HOME") + File.separator + "dml_recovery"
-                                         + File.separator + "stop_and_start.sh");
-                             }
-                             InputStream resultStream = null;
-                             OutputFormat fmt = OutputFormat.forCompilationUnit(cUnit);
-                             if (ctx.getFile().getName().endsWith("aql")) {
-                                 if (ctx.getType().equalsIgnoreCase("query")) {
-                                     resultStream = executeQuery(statement, fmt,
-                                             "http://" + host + ":" + port + Servlets.AQL_QUERY.getPath(),
-                                             cUnit.getParameter());
-                                 } else if (ctx.getType().equalsIgnoreCase("async")) {
-                                     resultStream = executeAnyAQLAsync(statement, false, fmt,
-                                             "http://" + host + ":" + port + Servlets.AQL.getPath());
-                                 } else if (ctx.getType().equalsIgnoreCase("asyncdefer")) {
-                                     resultStream = executeAnyAQLAsync(statement, true, fmt,
-                                             "http://" + host + ":" + port + Servlets.AQL.getPath());
-                                 }
-                             } else {
-                                 if (ctx.getType().equalsIgnoreCase("query")) {
-                                     resultStream = executeQuery(statement, fmt,
-                                             "http://" + host + ":" + port + Servlets.SQLPP_QUERY.getPath(),
-                                             cUnit.getParameter());
-                                 } else if (ctx.getType().equalsIgnoreCase("async")) {
-                                     resultStream = executeAnyAQLAsync(statement, false, fmt,
-                                             "http://" + host + ":" + port + Servlets.SQLPP.getPath());
-                                 } else if (ctx.getType().equalsIgnoreCase("asyncdefer")) {
-                                     resultStream = executeAnyAQLAsync(statement, true, fmt,
-                                             "http://" + host + ":" + port + Servlets.SQLPP.getPath());
-                                 }
-                             }
- 
-                             if (queryCount >= expectedResultFileCtxs.size()) {
-                                 throw new IllegalStateException(
-                                         "no result file for " + testFile.toString() + "; queryCount: " + queryCount
-                                                 + ", filectxs.size: " + expectedResultFileCtxs.size());
-                             }
-                             expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
- 
-                             File actualResultFile = testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
-                             actualResultFile.getParentFile().mkdirs();
-                             writeOutputToFile(actualResultFile, resultStream);
- 
-                             runScriptAndCompareWithResult(testFile, new PrintWriter(System.err), expectedResultFile,
-                                     actualResultFile);
-                             queryCount++;
-                             break;
-                         case "mgx":
-                             executeManagixCommand(statement);
-                             break;
-                         case "txnqbc": // qbc represents query before crash
-                             resultStream = executeQuery(statement, OutputFormat.forCompilationUnit(cUnit),
-                                     "http://" + host + ":" + port + Servlets.AQL_QUERY.getPath(), cUnit.getParameter());
-                             qbcFile = new File(actualPath + File.separator
-                                     + testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
-                                     + cUnit.getName() + "_qbc.adm");
-                             qbcFile.getParentFile().mkdirs();
-                             writeOutputToFile(qbcFile, resultStream);
-                             break;
-                         case "txnqar": // qar represents query after recovery
-                             resultStream = executeQuery(statement, OutputFormat.forCompilationUnit(cUnit),
-                                     "http://" + host + ":" + port + Servlets.AQL_QUERY.getPath(), cUnit.getParameter());
-                             qarFile = new File(actualPath + File.separator
-                                     + testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
-                                     + cUnit.getName() + "_qar.adm");
-                             qarFile.getParentFile().mkdirs();
-                             writeOutputToFile(qarFile, resultStream);
-                             runScriptAndCompareWithResult(testFile, new PrintWriter(System.err), qbcFile, qarFile);
-                             break;
-                         case "txneu": // eu represents erroneous update
-                             try {
-                                 executeUpdate(statement, "http://" + host + ":" + port + Servlets.AQL_UPDATE.getPath());
-                             } catch (Exception e) {
-                                 // An exception is expected.
-                                 failed = true;
-                                 e.printStackTrace();
-                             }
-                             if (!failed) {
-                                 throw new Exception(
-                                         "Test \"" + testFile + "\" FAILED!\n  An exception" + "is expected.");
-                             }
-                             System.err.println("...but that was expected.");
-                             break;
-                         case "script":
-                             try {
-                                 String output = executeScript(pb, getScriptPath(testFile.getAbsolutePath(),
-                                         pb.environment().get("SCRIPT_HOME"), statement.trim()));
-                                 if (output.contains("ERROR")) {
-                                     throw new Exception(output);
-                                 }
-                             } catch (Exception e) {
-                                 throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
-                             }
-                             break;
-                         case "sleep":
-                             String[] lines = statement.split("\n");
-                             Thread.sleep(Long.parseLong(lines[lines.length - 1].trim()));
-                             break;
-                         case "errddl": // a ddlquery that expects error
-                             try {
-                                 executeDDL(statement, "http://" + host + ":" + port + Servlets.AQL_DDL.getPath());
-                             } catch (Exception e) {
-                                 // expected error happens
-                                 failed = true;
-                                 e.printStackTrace();
-                             }
-                             if (!failed) {
-                                 throw new Exception("Test \"" + testFile + "\" FAILED!\n  An exception is expected.");
-                             }
-                             System.err.println("...but that was expected.");
-                             break;
-                         case "vscript": // a script that will be executed on a vagrant virtual node
-                             try {
-                                 String[] command = statement.trim().split(" ");
-                                 if (command.length != 2) {
-                                     throw new Exception("invalid vagrant script format");
-                                 }
-                                 String nodeId = command[0];
-                                 String scriptName = command[1];
-                                 String output = executeVagrantScript(pb, nodeId, scriptName);
-                                 if (output.contains("ERROR")) {
-                                     throw new Exception(output);
-                                 }
-                             } catch (Exception e) {
-                                 throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
-                             }
-                             break;
-                         case "vmgx": // a managix command that will be executed on vagrant cc node
-                             try {
-                                 String output = executeVagrantManagix(pb, statement);
-                                 if (output.contains("ERROR")) {
-                                     throw new Exception(output);
-                                 }
-                             } catch (Exception e) {
-                                 throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
-                             }
-                             break;
-                         case "cstate": // cluster state query
-                             try {
-                                 fmt = OutputFormat.forCompilationUnit(cUnit);
-                                 resultStream = executeClusterStateQuery(fmt,
-                                         "http://" + host + ":" + port + Servlets.CLUSTER_STATE.getPath());
-                                 expectedResultFile = expectedResultFileCtxs.get(queryCount).getFile();
-                                 actualResultFile = testCaseCtx.getActualResultFile(cUnit, new File(actualPath));
-                                 actualResultFile.getParentFile().mkdirs();
-                                 writeOutputToFile(actualResultFile, resultStream);
-                                 runScriptAndCompareWithResult(testFile, new PrintWriter(System.err), expectedResultFile,
-                                         actualResultFile);
-                                 queryCount++;
-                             } catch (Exception e) {
-                                 throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
-                             }
-                             break;
-                         case "server": // (start <test server name> <port>
-                                        // [<arg1>][<arg2>][<arg3>]...|stop (<port>|all))
-                             try {
-                                 lines = statement.trim().split("\n");
-                                 String[] command = lines[lines.length - 1].trim().split(" ");
-                                 if (command.length < 2) {
-                                     throw new Exception("invalid server command format. expected format ="
-                                             + " (start <test server name> <port> [<arg1>][<arg2>][<arg3>]"
-                                             + "...|stop (<port>|all))");
-                                 }
-                                 String action = command[0];
-                                 if (action.equals("start")) {
-                                     if (command.length < 3) {
-                                         throw new Exception("invalid server start command. expected format ="
-                                                 + " (start <test server name> <port> [<arg1>][<arg2>][<arg3>]...");
-                                     }
-                                     String name = command[1];
-                                     Integer port = new Integer(command[2]);
-                                     if (runningTestServers.containsKey(port)) {
-                                         throw new Exception("server with port " + port + " is already running");
-                                     }
-                                     ITestServer server = TestServerProvider.createTestServer(name, port);
-                                     server.configure(Arrays.copyOfRange(command, 3, command.length));
-                                     server.start();
-                                     runningTestServers.put(port, server);
-                                 } else if (action.equals("stop")) {
-                                     String target = command[1];
-                                     if (target.equals("all")) {
-                                         for (ITestServer server : runningTestServers.values()) {
-                                             server.stop();
-                                         }
-                                         runningTestServers.clear();
-                                     } else {
-                                         Integer port = new Integer(command[1]);
-                                         ITestServer server = runningTestServers.get(port);
-                                         if (server == null) {
-                                             throw new Exception("no server is listening to port " + port);
-                                         }
-                                         server.stop();
-                                         runningTestServers.remove(port);
-                                     }
-                                 } else {
-                                     throw new Exception("unknown server action");
-                                 }
-                             } catch (Exception e) {
-                                 throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
-                             }
-                             break;
-                         case "lib": // expected format <dataverse-name> <library-name>
-                                     // <library-directory>
-                                     // TODO: make this case work well with entity names containing spaces by
-                                     // looking for \"
-                             lines = statement.split("\n");
-                             String lastLine = lines[lines.length - 1];
-                             String[] command = lastLine.trim().split(" ");
-                             if (command.length < 3) {
-                                 throw new Exception("invalid library format");
-                             }
-                             String dataverse = command[1];
-                             String library = command[2];
-                             switch (command[0]) {
-                                 case "install":
-                                     if (command.length != 4) {
-                                         throw new Exception("invalid library format");
-                                     }
-                                     String libPath = command[3];
-                                     librarian.install(dataverse, library, libPath);
-                                     break;
-                                 case "uninstall":
-                                     if (command.length != 3) {
-                                         throw new Exception("invalid library format");
-                                     }
-                                     librarian.uninstall(dataverse, library);
-                                     break;
-                                 default:
-                                     throw new Exception("invalid library format");
-                             }
-                             break;
-                         default:
-                             throw new IllegalArgumentException("No statements of type " + ctx.getType());
-                     }
- 
++                    executeTest(testCaseCtx, ctx, statement, isDmlRecoveryTest, pb, cUnit, queryCount,
++                            expectedResultFileCtxs, testFile, actualPath);
 +                } catch (Exception e) {
 +                    System.err.println("testFile " + testFile.toString() + " raised an exception:");
 +                    boolean unExpectedFailure = false;
 +                    numOfErrors++;
 +                    if (cUnit.getExpectedError().size() < numOfErrors) {
 +                        unExpectedFailure = true;
 +                    } else {
 +                        // Get the expected exception
 +                        String expectedError = cUnit.getExpectedError().get(numOfErrors - 1);
 +                        if (e.toString().contains(expectedError)) {
 +                            System.err.println("...but that was expected.");
 +                        } else {
 +                            unExpectedFailure = true;
 +                        }
 +                    }
 +                    if (unExpectedFailure) {
 +                        e.printStackTrace();
 +                        System.err.println("...Unexpected!");
 +                        if (failedGroup != null) {
 +                            failedGroup.getTestCase().add(testCaseCtx.getTestCase());
 +                        }
 +                        throw new Exception("Test \"" + testFile + "\" FAILED!", e);
 +                    }
 +                } finally {
 +                    if (numOfFiles == testFileCtxs.size() && numOfErrors < cUnit.getExpectedError().size()) {
 +                        System.err.println("...Unexpected!");
 +                        Exception e = new Exception(
 +                                "Test \"" + cUnit.getName() + "\" FAILED!\nExpected error was not thrown...");
 +                        e.printStackTrace();
 +                        throw e;
 +                    } else if (numOfFiles == testFileCtxs.size()) {
 +                        LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
 +                                + " PASSED ");
 +                    }
 +                }
 +            }
 +        }
 +    }
++
++    private static File getTestCaseQueryBeforeCrashFile(String actualPath, TestCaseContext testCaseCtx,
++            CompilationUnit cUnit) {
++        return new File(
++                actualPath + File.separator + testCaseCtx.getTestCase().getFilePath().replace(File.separator, "_") + "_"
++                        + cUnit.getName() + "_qbc.adm");
++    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/pom.xml
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/pom.xml
index 9a8540f,0000000..8c59cc4
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/pom.xml
+++ b/asterixdb/asterix-external-data/pom.xml
@@@ -1,298 -1,0 +1,298 @@@
 +<!-- 
 + ! Licensed to the Apache Software Foundation (ASF) under one
 + ! or more contributor license agreements.  See the NOTICE file
 + ! distributed with this work for additional information
 + ! regarding copyright ownership.  The ASF licenses this file
 + ! to you under the Apache License, Version 2.0 (the
 + ! "License"); you may not use this file except in compliance
 + ! with the License.  You may obtain a copy of the License at
 + !
 + !   http://www.apache.org/licenses/LICENSE-2.0
 + !
 + ! Unless required by applicable law or agreed to in writing,
 + ! software distributed under the License is distributed on an
 + ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + ! KIND, either express or implied.  See the License for the
 + ! specific language governing permissions and limitations
 + ! under the License.
 + !-->
 +<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 +    <modelVersion>4.0.0</modelVersion>
 +    <parent>
 +        <artifactId>apache-asterixdb</artifactId>
 +        <groupId>org.apache.asterix</groupId>
 +        <version>0.8.9-SNAPSHOT</version>
 +    </parent>
 +    <licenses>
 +        <license>
 +            <name>Apache License, Version 2.0</name>
 +            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
 +            <distribution>repo</distribution>
 +            <comments>A business-friendly OSS license</comments>
 +        </license>
 +    </licenses>
 +    <artifactId>asterix-external-data</artifactId>
 +    <properties>
 +        <appendedResourcesDirectory>${basedir}/../src/main/appended-resources</appendedResourcesDirectory>
 +    </properties>
 +    <build>
 +        <plugins>
 +            <plugin>
 +                <groupId>org.apache.asterix</groupId>
 +                <artifactId>lexer-generator-maven-plugin</artifactId>
 +                <version>0.8.9-SNAPSHOT</version>
 +                <configuration>
 +                    <grammarFile>src/main/resources/adm.grammar</grammarFile>
 +                    <outputDir>${project.build.directory}/generated-sources/org/apache/asterix/runtime/operators/file/adm</outputDir>
 +                </configuration>
 +                <executions>
 +                    <execution>
 +                        <id>generate-lexer</id>
 +                        <phase>generate-sources</phase>
 +                        <goals>
 +                            <goal>generate-lexer</goal>
 +                        </goals>
 +                    </execution>
 +                </executions>
 +            </plugin>
 +            <plugin>
 +                <groupId>org.codehaus.mojo</groupId>
 +                <artifactId>build-helper-maven-plugin</artifactId>
 +                <version>1.9</version>
 +                <executions>
 +                    <execution>
 +                        <id>add-source</id>
 +                        <phase>generate-sources</phase>
 +                        <goals>
 +                            <goal>add-source</goal>
 +                        </goals>
 +                        <configuration>
 +                            <sources>
 +                                <source>${project.build.directory}/generated-sources/</source>
 +                            </sources>
 +                        </configuration>
 +                    </execution>
 +                </executions>
 +            </plugin>
 +            <plugin>
 +                <groupId>org.jvnet.jaxb2.maven2</groupId>
 +                <artifactId>maven-jaxb2-plugin</artifactId>
 +                <version>0.9.0</version>
 +                <executions>
 +                    <execution>
 +                        <id>configuration</id>
 +                        <goals>
 +                            <goal>generate</goal>
 +                        </goals>
 +                        <configuration>
 +                            <schemaDirectory>src/main/resources/schema</schemaDirectory>
 +                            <schemaIncludes>
 +                                <include>library.xsd</include>
 +                            </schemaIncludes>
 +                            <generatePackage>org.apache.asterix.external.library</generatePackage>
 +                            <generateDirectory>${project.build.directory}/generated-sources/configuration</generateDirectory>
 +                        </configuration>
 +                    </execution>
 +                </executions>
 +            </plugin>
 +         <plugin>
 +            <groupId>org.apache.maven.plugins</groupId>
 +            <artifactId>maven-jar-plugin</artifactId>
 +            <version>2.4</version>
 +            <configuration>
 +               <includes>
 +                  <include>**/*.class</include>
 +                  <include>**/*.txt</include>
 +                  <include>**/DISCLAIMER</include>
 +                  <include>**/NOTICE</include>
 +                  <include>**/LICENSE</include>
 +                  <include>**/DEPENDENCIES</include>
 +               </includes>
 +            </configuration>
 +            <executions>
 +               <execution>
 +                  <goals>
 +                     <goal>test-jar</goal>
 +                  </goals>
 +                  <phase>package</phase>
 +               </execution>
 +            </executions>
 +         </plugin>
 +         <plugin>
 +            <artifactId>maven-assembly-plugin</artifactId>
 +            <version>2.2-beta-5</version>
 +            <executions>
 +               <execution>
 +                  <configuration>
 +                     <descriptor>src/main/assembly/binary-assembly-libzip.xml</descriptor>
 +                     <finalName>testlib-zip</finalName>
 +                  </configuration>
 +                  <phase>package</phase>
 +                  <goals>
 +                     <goal>attached</goal>
 +                  </goals>
 +               </execution>
 +            </executions>
 +         </plugin>
 +        </plugins>
 +        <pluginManagement>
 +            <plugins>
 +                <!--This plugin's configuration is used to store Eclipse m2e settings only. It has no influence on the Maven build itself.-->
 +                <plugin>
 +                    <groupId>org.eclipse.m2e</groupId>
 +                    <artifactId>lifecycle-mapping</artifactId>
 +                    <version>1.0.0</version>
 +                    <configuration>
 +                        <lifecycleMappingMetadata>
 +                            <pluginExecutions>
 +                                <pluginExecution>
 +                                    <pluginExecutionFilter>
 +                                        <groupId> org.apache.asterix</groupId>
 +                                        <artifactId> lexer-generator-maven-plugin</artifactId>
 +                                        <versionRange>[0.1,)</versionRange>
 +                                        <goals>
 +                                            <goal>generate-lexer</goal>
 +                                        </goals>
 +                                    </pluginExecutionFilter>
 +                                    <action>
 +                                        <execute>
 +                                            <runOnIncremental>false</runOnIncremental>
 +                                        </execute>
 +                                    </action>
 +                                </pluginExecution>
 +                                <pluginExecution>
 +                                    <pluginExecutionFilter>
 +                                        <groupId> org.codehaus.mojo</groupId>
 +                                        <artifactId>build-helper-maven-plugin</artifactId>
 +                                        <versionRange>[1.7,)</versionRange>
 +                                        <goals>
 +                                            <goal>add-source</goal>
 +                                        </goals>
 +                                    </pluginExecutionFilter>
 +                                    <action>
 +                                        <ignore />
 +                                    </action>
 +                                </pluginExecution>
 +                            </pluginExecutions>
 +                        </lifecycleMappingMetadata>
 +                    </configuration>
 +                </plugin>
 +            </plugins>
 +        </pluginManagement>
 +    </build>
 +    <dependencies>
 +        <dependency>
 +            <groupId>javax.servlet</groupId>
 +            <artifactId>servlet-api</artifactId>
 +            <type>jar</type>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-om</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +            <type>jar</type>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-runtime</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +            <type>jar</type>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.hyracks</groupId>
 +            <artifactId>algebricks-compiler</artifactId>
 +        </dependency>
 +        <dependency>
 +            <groupId>com.kenai.nbpwr</groupId>
 +            <artifactId>org-apache-commons-io</artifactId>
 +            <version>1.3.1-201002241208</version>
 +            <scope>test</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.twitter4j</groupId>
 +            <artifactId>twitter4j-core</artifactId>
-             <version>[4.0,)</version>
++            <version>4.0.3</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.twitter4j</groupId>
 +            <artifactId>twitter4j-stream</artifactId>
-             <version>[4.0,)</version>
++            <version>4.0.3</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.hadoop</groupId>
 +            <artifactId>hadoop-client</artifactId>
 +            <type>jar</type>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.hadoop</groupId>
 +            <artifactId>hadoop-hdfs</artifactId>
 +        </dependency>
 +        <dependency>
 +            <groupId>net.java.dev.rome</groupId>
 +            <artifactId>rome-fetcher</artifactId>
 +            <version>1.0.0</version>
 +            <type>jar</type>
 +            <scope>compile</scope>
 +            <exclusions>
 +                <exclusion>
 +                    <artifactId>rome</artifactId>
 +                    <groupId>net.java.dev.rome</groupId>
 +                </exclusion>
 +            </exclusions>
 +        </dependency>
 +        <dependency>
 +            <groupId>rome</groupId>
 +            <artifactId>rome</artifactId>
 +            <version>1.0.1-modified-01</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.hyracks</groupId>
 +            <artifactId>hyracks-hdfs-core</artifactId>
 +            <version>${hyracks.version}</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>jdom</groupId>
 +            <artifactId>jdom</artifactId>
 +            <version>1.0</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-common</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>com.microsoft.windowsazure</groupId>
 +            <artifactId>microsoft-windowsazure-api</artifactId>
 +            <version>0.4.4</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.hive</groupId>
 +            <artifactId>hive-exec</artifactId>
 +            <version>0.13.0</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>javax.jdo</groupId>
 +            <artifactId>jdo2-api</artifactId>
 +            <version>2.3-20090302111651</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>com.e-movimento.tinytools</groupId>
 +            <artifactId>privilegedaccessor</artifactId>
 +            <version>1.2.2</version>
 +            <scope>test</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>com.couchbase.client</groupId>
 +            <artifactId>core-io</artifactId>
 +            <version>1.2.3</version>
 +        </dependency>
 +        <dependency>
 +            <groupId>io.reactivex</groupId>
 +            <artifactId>rxjava</artifactId>
 +            <version>1.0.15</version>
 +        </dependency>
 +    </dependencies>
 +</project>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index a03ad1a,0000000..d3abd50
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@@ -1,162 -1,0 +1,165 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.adapter.factory;
 +
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.IAdapterFactory;
 +import org.apache.asterix.external.api.IDataFlowController;
 +import org.apache.asterix.external.api.IDataParserFactory;
 +import org.apache.asterix.external.api.IDataSourceAdapter;
 +import org.apache.asterix.external.api.IExternalDataSourceFactory;
 +import org.apache.asterix.external.api.IIndexibleExternalDataSource;
 +import org.apache.asterix.external.api.IIndexingAdapterFactory;
 +import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 +import org.apache.asterix.external.dataset.adapter.FeedAdapter;
 +import org.apache.asterix.external.dataset.adapter.GenericAdapter;
 +import org.apache.asterix.external.indexing.ExternalFile;
 +import org.apache.asterix.external.provider.DataflowControllerProvider;
 +import org.apache.asterix.external.provider.DatasourceFactoryProvider;
 +import org.apache.asterix.external.provider.ParserFactoryProvider;
 +import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataUtils;
++import org.apache.asterix.external.util.FeedLogManager;
 +import org.apache.asterix.external.util.FeedUtils;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +
 +public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterFactory {
 +
 +    private static final long serialVersionUID = 1L;
 +    private IExternalDataSourceFactory dataSourceFactory;
 +    private IDataParserFactory dataParserFactory;
 +    private ARecordType recordType;
 +    private Map<String, String> configuration;
 +    private List<ExternalFile> files;
 +    private boolean indexingOp;
 +    private boolean isFeed;
 +    private FileSplit[] feedLogFileSplits;
 +    private ARecordType metaType;
++    private FeedLogManager feedLogManager = null;
 +
 +    @Override
 +    public void setSnapshot(List<ExternalFile> files, boolean indexingOp) {
 +        this.files = files;
 +        this.indexingOp = indexingOp;
 +    }
 +
 +    @Override
 +    public String getAlias() {
 +        return ExternalDataConstants.ALIAS_GENERIC_ADAPTER;
 +    }
 +
 +    @Override
 +    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
 +        return dataSourceFactory.getPartitionConstraint();
 +    }
 +
 +    /**
 +     * Runs on each node controller (after serialization-deserialization)
 +     */
 +    @Override
 +    public synchronized IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition)
 +            throws HyracksDataException {
 +        try {
 +            restoreExternalObjects();
 +        } catch (AsterixException e) {
 +            throw new HyracksDataException(e);
 +        }
++        if (isFeed) {
++            if (feedLogManager == null) {
++                feedLogManager = FeedUtils.getFeedLogManager(ctx, partition, feedLogFileSplits);
++            }
++            feedLogManager.touch();
++        }
 +        IDataFlowController controller = DataflowControllerProvider.getDataflowController(recordType, ctx, partition,
-                 dataSourceFactory, dataParserFactory, configuration, indexingOp, isFeed, feedLogFileSplits);
++                dataSourceFactory, dataParserFactory, configuration, indexingOp, isFeed, feedLogManager);
 +        if (isFeed) {
 +            return new FeedAdapter((AbstractFeedDataFlowController) controller);
 +        } else {
 +            return new GenericAdapter(controller);
 +        }
 +    }
 +
 +    private void restoreExternalObjects() throws AsterixException {
 +        if (dataSourceFactory == null) {
 +            dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(configuration);
 +            // create and configure parser factory
 +            if (dataSourceFactory.isIndexible() && (files != null)) {
 +                ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
 +            }
 +            dataSourceFactory.configure(configuration);
 +        }
 +        if (dataParserFactory == null) {
 +            // create and configure parser factory
 +            dataParserFactory = ParserFactoryProvider.getDataParserFactory(configuration);
 +            dataParserFactory.setRecordType(recordType);
 +            dataParserFactory.setMetaType(metaType);
 +            dataParserFactory.configure(configuration);
 +        }
 +    }
 +
 +    @Override
 +    public void configure(Map<String, String> configuration, ARecordType outputType, ARecordType metaType)
 +            throws AsterixException {
 +        this.recordType = outputType;
 +        this.metaType = metaType;
 +        this.configuration = configuration;
 +        dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(configuration);
- 
 +        dataParserFactory = ParserFactoryProvider.getDataParserFactory(configuration);
-         prepare();
++        if (dataSourceFactory.isIndexible() && (files != null)) {
++            ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
++        }
++        dataSourceFactory.configure(configuration);
++        dataParserFactory.setRecordType(recordType);
++        dataParserFactory.setMetaType(metaType);
++        dataParserFactory.configure(configuration);
 +        ExternalDataCompatibilityUtils.validateCompatibility(dataSourceFactory, dataParserFactory);
 +        configureFeedLogManager();
 +        nullifyExternalObjects();
 +    }
 +
 +    private void configureFeedLogManager() throws AsterixException {
 +        this.isFeed = ExternalDataUtils.isFeed(configuration);
 +        if (isFeed) {
 +            feedLogFileSplits = FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
 +                    ExternalDataUtils.getFeedName(configuration), dataSourceFactory.getPartitionConstraint());
 +        }
 +    }
 +
 +    private void nullifyExternalObjects() {
 +        if (ExternalDataUtils.isExternal(configuration.get(ExternalDataConstants.KEY_READER))) {
 +            dataSourceFactory = null;
 +        }
 +        if (ExternalDataUtils.isExternal(configuration.get(ExternalDataConstants.KEY_PARSER))) {
 +            dataParserFactory = null;
 +        }
 +    }
 +
-     private void prepare() throws AsterixException {
-         if (dataSourceFactory.isIndexible() && (files != null)) {
-             ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
-         }
-         dataSourceFactory.configure(configuration);
-         dataParserFactory.setRecordType(recordType);
-         dataParserFactory.setMetaType(metaType);
-         dataParserFactory.configure(configuration);
-     }
- 
 +    @Override
 +    public ARecordType getAdapterOutputType() {
 +        return recordType;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/AsterixInputStream.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/AsterixInputStream.java
index 83d7a3a,0000000..a4c2fae
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/AsterixInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/AsterixInputStream.java
@@@ -1,44 -1,0 +1,50 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.api;
 +
 +import java.io.InputStream;
 +
 +import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 +import org.apache.asterix.external.util.FeedLogManager;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
 +public abstract class AsterixInputStream extends InputStream {
 +
 +    protected AbstractFeedDataFlowController controller;
 +    protected FeedLogManager logManager;
++    protected IStreamNotificationHandler notificationHandler;
 +
 +    public abstract boolean stop() throws Exception;
 +
 +    public abstract boolean handleException(Throwable th);
 +
 +    // TODO: Find a better way to send notifications
 +    public void setController(AbstractFeedDataFlowController controller) {
 +        this.controller = controller;
 +    }
 +
 +    // TODO: Find a better way to send notifications
-     public void setFeedLogManager(FeedLogManager logManager) {
++    public void setFeedLogManager(FeedLogManager logManager) throws HyracksDataException {
 +        this.logManager = logManager;
 +    }
++
++    public void setNotificationHandler(IStreamNotificationHandler notificationHandler) {
++        this.notificationHandler = notificationHandler;
++    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
index 11e2472,0000000..9cce1c9
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
@@@ -1,71 -1,0 +1,76 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.api;
 +
 +import java.io.Closeable;
 +import java.io.IOException;
 +
 +import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 +import org.apache.asterix.external.util.FeedLogManager;
++import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
 +/**
 + * This interface represents a record reader that reads data from external source as a set of records
++ *
 + * @param <T>
 + */
 +public interface IRecordReader<T> extends Closeable {
 +
 +    /**
 +     * @return true if the reader has more records remaining, false, otherwise.
 +     * @throws Exception
-      *         if an error takes place
++     *             if an error takes place
 +     */
 +    public boolean hasNext() throws Exception;
 +
 +    /**
 +     * @return the object representing the next record.
 +     * @throws IOException
 +     * @throws InterruptedException
 +     */
 +    public IRawRecord<T> next() throws IOException, InterruptedException;
 +
 +    /**
 +     * used to stop reader from producing more records.
++     *
 +     * @return true if the connection to the external source has been suspended, false otherwise.
 +     */
 +    public boolean stop();
 +
 +    // TODO: Find a better way to do flushes, this doesn't fit here
 +    /**
 +     * set a pointer to the controller of the feed. the controller can be used to flush()
 +     * parsed records when waiting for more records to be pushed
 +     */
 +    public void setController(AbstractFeedDataFlowController controller);
 +
 +    // TODO: Find a better way to perform logging. this doesn't fit here
 +    /**
 +     * set a pointer to the log manager of the feed. the log manager can be used to log
 +     * progress and errors
++     *
++     * @throws HyracksDataException
 +     */
-     public void setFeedLogManager(FeedLogManager feedLogManager);
++    public void setFeedLogManager(FeedLogManager feedLogManager) throws HyracksDataException;
 +
 +    /**
 +     * gives the record reader a chance to recover from IO errors during feed intake
 +     */
 +    public boolean handleException(Throwable th);
 +}


[10/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
----------------------------------------------------------------------
diff --cc asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
index 7aa1129,0000000..e6b7e21
mode 100644,000000..100644
--- a/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/external_index/external_index.2.ddl.aql
@@@ -1,42 -1,0 +1,46 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Create an external dataset that contains records stored with text hdfs file format.
 +                 Build an index over the external dataset age attribute
 +                 Perform a query over the dataset using the index.
 +* Expected Res : Success
 +* Date         : 3rd Jan 2014
 +*/
 +drop dataverse test if exists;
 +create dataverse test;
 +
 +use dataverse test;
 +
 +create type EmployeeType as closed {
 + id: int64,
 + name: string,
 + age: int64
 +};
 +
 +create external dataset EmployeeDataset(EmployeeType)
 +using hdfs
- (("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/external-indexing-test.txt"),("input-format"="text-input-format"),("format"="delimited-text"),("delimiter"="|"));
++(("hdfs"="hdfs://127.0.0.1:31888"),
++("path"="/asterix/external-indexing-test.txt"),
++("input-format"="text-input-format"),
++("format"="delimited-text"),
++("delimiter"="|"));
 +
 +create index EmployeeAgeIdx on EmployeeDataset(age);
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-installer/src/test/resources/transactionts/testsuite.xml
----------------------------------------------------------------------
diff --cc asterixdb/asterix-installer/src/test/resources/transactionts/testsuite.xml
index 0635e7c,0000000..0c12426
mode 100644,000000..100644
--- a/asterixdb/asterix-installer/src/test/resources/transactionts/testsuite.xml
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/testsuite.xml
@@@ -1,199 -1,0 +1,203 @@@
 +<!--
 + ! Licensed to the Apache Software Foundation (ASF) under one
 + ! or more contributor license agreements.  See the NOTICE file
 + ! distributed with this work for additional information
 + ! regarding copyright ownership.  The ASF licenses this file
 + ! to you under the Apache License, Version 2.0 (the
 + ! "License"); you may not use this file except in compliance
 + ! with the License.  You may obtain a copy of the License at
 + !
 + !   http://www.apache.org/licenses/LICENSE-2.0
 + !
 + ! Unless required by applicable law or agreed to in writing,
 + ! software distributed under the License is distributed on an
 + ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + ! KIND, either express or implied.  See the License for the
 + ! specific language governing permissions and limitations
 + ! under the License.
 + !-->
 +<test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
- 
 +  <test-group name="query_after_restart">
 +      <test-case FilePath="query_after_restart">
++          <compilation-unit name="dataset-with-meta-record">
++              <output-dir compare="Text">dataset-with-meta-record</output-dir>
++          </compilation-unit>
++      </test-case>
++      <test-case FilePath="query_after_restart">
 +          <compilation-unit name="external_index">
 +              <output-dir compare="Text">external_index</output-dir>
 +          </compilation-unit>
 +      </test-case>
 +  </test-group>
 +  <test-group name="recover_after_abort">
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="primary_index_only">
 +        <output-dir compare="Text">primary_index_only</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="primary_plus_default_secondary_index">
 +        <output-dir compare="Text">primary_plus_default_secondary_index</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="primary_plus_rtree_index">
 +        <output-dir compare="Text">primary_plus_rtree_index</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="primary_plus_keyword_secondary_index">
 +        <output-dir compare="Text">primary_plus_keyword_secondary_index</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="primary_plus_ngram_index">
 +        <output-dir compare="Text">primary_plus_ngram_index</output-dir>
 +      </compilation-unit>
 +    </test-case>
 + 
 +	<test-case FilePath="recover_after_abort">
 +      <compilation-unit name="primary_plus_multiple_secondary_indices">
 +        <output-dir compare="Text">primary_plus_multiple_secondary_indices</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +  
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="temp_primary_index_only">
 +        <output-dir compare="Text">primary_index_only</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="temp_primary_plus_default_secondary_index">
 +        <output-dir compare="Text">primary_plus_default_secondary_index</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="temp_primary_plus_rtree_index">
 +        <output-dir compare="Text">primary_plus_rtree_index</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="temp_primary_plus_keyword_secondary_index">
 +        <output-dir compare="Text">primary_plus_keyword_secondary_index</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile in dataverse recovery</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="temp_primary_plus_ngram_index">
 +        <output-dir compare="Text">primary_plus_ngram_index</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recover_after_abort">
 +      <compilation-unit name="primary_plus_multiple_secondary_indices"><!-- The only exception here is during the kill command which is in a different JVM, hence not caught -->
 +        <output-dir compare="Text">primary_plus_multiple_secondary_indices</output-dir>
 +        <!-- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error> -->
 +      </compilation-unit>
 +    </test-case>
 + </test-group>
 +
 + <test-group name="recovery_ddl">
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="dataverse_recovery">
 +        <output-dir compare="Text">dataverse_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="datatype_recovery">
 +        <output-dir compare="Text">datatype_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="dataset_recovery">
 +        <output-dir compare="Text">dataset_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="secondary_index_recovery">
 +        <output-dir compare="Text">secondary_index_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="load_after_recovery">
 +        <output-dir compare="Text">load_after_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="insert_after_recovery">
 +        <output-dir compare="Text">insert_after_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="delete_after_recovery">
 +        <output-dir compare="Text">delete_after_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="function_recovery">
 +        <output-dir compare="Text">function_recovery</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="temp_dataset_recovery">
 +        <output-dir compare="Text">dataset_recovery</output-dir>
 +        <!-- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error> -->
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="temp_delete_after_recovery">
 +        <output-dir compare="Text">delete_after_recovery</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Cannot find dataset Fragile_raw in dataverse recovery</expected-error>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile_raw in dataverse recovery</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="temp_insert_after_recovery">
 +        <output-dir compare="Text">insert_after_recovery</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile in dataverse recovery</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="temp_load_after_recovery">
 +        <output-dir compare="Text">load_after_recovery</output-dir>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
 +        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile_raw in dataverse recovery</expected-error>
 +      </compilation-unit>
 +    </test-case>
 +
 +    <test-case FilePath="recovery_ddl">
 +      <compilation-unit name="temp_secondary_index_recovery">
 +        <output-dir compare="Text">secondary_index_recovery</output-dir>
 +        <!-- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error> -->
 +      </compilation-unit>
 +    </test-case>
 + 
 +  </test-group>
 +
 +</test-suite>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-common/pom.xml
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-common/pom.xml
index d1a5f80,0000000..dd5035f
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-common/pom.xml
+++ b/asterixdb/asterix-lang-common/pom.xml
@@@ -1,84 -1,0 +1,84 @@@
 +<!--
 + ! Licensed to the Apache Software Foundation (ASF) under one
 + ! or more contributor license agreements.  See the NOTICE file
 + ! distributed with this work for additional information
 + ! regarding copyright ownership.  The ASF licenses this file
 + ! to you under the Apache License, Version 2.0 (the
 + ! "License"); you may not use this file except in compliance
 + ! with the License.  You may obtain a copy of the License at
 + !
 + !   http://www.apache.org/licenses/LICENSE-2.0
 + !
 + ! Unless required by applicable law or agreed to in writing,
 + ! software distributed under the License is distributed on an
 + ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + ! KIND, either express or implied.  See the License for the
 + ! specific language governing permissions and limitations
 + ! under the License.
 + !-->
 +<project
 +    xmlns="http://maven.apache.org/POM/4.0.0"
 +    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
 +    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 +    <modelVersion>4.0.0</modelVersion>
 +    <parent>
 +        <artifactId>apache-asterixdb</artifactId>
 +        <groupId>org.apache.asterix</groupId>
 +        <version>0.8.9-SNAPSHOT</version>
 +    </parent>
 +
 +    <licenses>
 +        <license>
 +            <name>Apache License, Version 2.0</name>
 +            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
 +            <distribution>repo</distribution>
 +            <comments>A business-friendly OSS license</comments>
 +        </license>
 +    </licenses>
 +
 +    <artifactId>asterix-lang-common</artifactId>
 +    <build>
 +        <plugins>
 +            <plugin>
 +                <groupId>org.codehaus.mojo</groupId>
 +                <artifactId>build-helper-maven-plugin</artifactId>
 +                <version>1.9</version>
 +                <executions>
 +                    <execution>
 +                        <id>add-source</id>
 +                        <phase>generate-sources</phase>
 +                        <goals>
 +                            <goal>add-source</goal>
 +                        </goals>
 +                        <configuration>
 +                            <sources>
 +                                <source>${project.build.directory}/generated-sources/javacc/</source>
 +                            </sources>
 +                        </configuration>
 +                    </execution>
 +                </executions>
 +            </plugin>
 +        </plugins>
 +    </build>
 +
 +    <dependencies>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-common</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>org.apache.asterix</groupId>
 +            <artifactId>asterix-metadata</artifactId>
 +            <version>0.8.9-SNAPSHOT</version>
 +            <scope>compile</scope>
 +        </dependency>
 +        <dependency>
 +            <groupId>xerces</groupId>
-             <artifactId>xerces</artifactId>
-             <version>2.4.0</version>
++            <artifactId>xercesImpl</artifactId>
++            <version>2.9.1</version>
 +        </dependency>
 +    </dependencies>
 +
 +</project>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/GroupbyClause.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/GroupbyClause.java
index 50e0ae2,0000000..b5d78e2
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/GroupbyClause.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/GroupbyClause.java
@@@ -1,131 -1,0 +1,143 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.common.clause;
 +
 +import java.util.ArrayList;
 +import java.util.List;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.Clause;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
 +import org.apache.asterix.lang.common.expression.VariableExpr;
 +import org.apache.asterix.lang.common.struct.Identifier;
 +import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +
 +public class GroupbyClause implements Clause {
 +
 +    private List<GbyVariableExpressionPair> gbyPairList;
 +    private List<GbyVariableExpressionPair> decorPairList;
 +    private List<VariableExpr> withVarList;
 +    private VariableExpr groupVar;
 +    private List<Pair<Expression, Identifier>> groupFieldList = new ArrayList<Pair<Expression, Identifier>>();
 +    private boolean hashGroupByHint;
++    private boolean groupAll;
 +
 +    public GroupbyClause() {
 +    }
 +
 +    public GroupbyClause(List<GbyVariableExpressionPair> gbyPairList, List<GbyVariableExpressionPair> decorPairList,
 +            List<VariableExpr> withVarList, VariableExpr groupVarExpr,
 +            List<Pair<Expression, Identifier>> groupFieldList, boolean hashGroupByHint) {
++        this(gbyPairList, decorPairList, withVarList, groupVarExpr, groupFieldList, hashGroupByHint, false);
++    }
++
++    public GroupbyClause(List<GbyVariableExpressionPair> gbyPairList, List<GbyVariableExpressionPair> decorPairList,
++            List<VariableExpr> withVarList, VariableExpr groupVarExpr,
++            List<Pair<Expression, Identifier>> groupFieldList, boolean hashGroupByHint, boolean groupAll) {
 +        this.gbyPairList = gbyPairList;
 +        this.decorPairList = decorPairList;
 +        this.withVarList = withVarList;
 +        this.groupVar = groupVarExpr;
 +        if (groupFieldList != null) {
 +            this.groupFieldList = groupFieldList;
 +        }
 +        this.hashGroupByHint = hashGroupByHint;
++        this.groupAll = groupAll;
 +    }
 +
 +    public List<GbyVariableExpressionPair> getGbyPairList() {
 +        return gbyPairList;
 +    }
 +
 +    public void setGbyPairList(List<GbyVariableExpressionPair> vePairList) {
 +        this.gbyPairList = vePairList;
 +    }
 +
 +    public List<VariableExpr> getWithVarList() {
 +        return withVarList;
 +    }
 +
 +    public void setWithVarList(List<VariableExpr> withVarList) {
 +        this.withVarList = withVarList;
 +    }
 +
 +    public VariableExpr getGroupVar() {
 +        return groupVar;
 +    }
 +
 +    public void setGroupVar(VariableExpr groupVarExpr) {
 +        this.groupVar = groupVarExpr;
 +    }
 +
 +    public List<Pair<Expression, Identifier>> getGroupFieldList() {
 +        return groupFieldList;
 +    }
 +
 +    public void setGroupFieldList(List<Pair<Expression, Identifier>> groupFieldList) {
 +        this.groupFieldList = groupFieldList;
 +    }
 +
 +    @Override
 +    public ClauseType getClauseType() {
 +        return ClauseType.GROUP_BY_CLAUSE;
 +    }
 +
 +    @Override
 +    public <R, T> R accept(ILangVisitor<R, T> visitor, T arg) throws AsterixException {
 +        return visitor.visit(this, arg);
 +    }
 +
 +    public void setDecorPairList(List<GbyVariableExpressionPair> decorPairList) {
 +        this.decorPairList = decorPairList;
 +    }
 +
 +    public List<GbyVariableExpressionPair> getDecorPairList() {
 +        return decorPairList;
 +    }
 +
 +    public void setHashGroupByHint(boolean hashGroupByHint) {
 +        this.hashGroupByHint = hashGroupByHint;
 +    }
 +
 +    public boolean hasHashGroupByHint() {
 +        return hashGroupByHint;
 +    }
 +
 +    public boolean hasDecorList() {
 +        return decorPairList != null && decorPairList.size() > 0;
 +    }
 +
 +    public boolean hasWithList() {
 +        return withVarList != null && withVarList.size() > 0;
 +    }
 +
 +    public boolean hasGroupVar() {
 +        return groupVar != null;
 +    }
 +
 +    public boolean hasGroupFieldList() {
 +        return groupFieldList != null && groupFieldList.size() > 0;
 +    }
++
++    public boolean isGroupAll() {
++        return groupAll;
++    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/LimitClause.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/LimitClause.java
index 451d7ef,0000000..7ebedc2
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/LimitClause.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/clause/LimitClause.java
@@@ -1,63 -1,0 +1,67 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.common.clause;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.Clause;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 +
 +public class LimitClause implements Clause {
 +    private Expression limitexpr;
 +    private Expression offset;
 +
 +    public LimitClause() {
 +    }
 +
 +    public LimitClause(Expression limitexpr, Expression offset) {
 +        this.limitexpr = limitexpr;
 +        this.offset = offset;
 +    }
 +
 +    public Expression getLimitExpr() {
 +        return limitexpr;
 +    }
 +
 +    public void setLimitExpr(Expression limitexpr) {
 +        this.limitexpr = limitexpr;
 +    }
 +
 +    public Expression getOffset() {
 +        return offset;
 +    }
 +
 +    public void setOffset(Expression offset) {
 +        this.offset = offset;
 +    }
 +
++    public boolean hasOffset() {
++        return offset != null;
++    }
++
 +    @Override
 +    public ClauseType getClauseType() {
 +        return ClauseType.LIMIT_CLAUSE;
 +    }
 +
 +    @Override
 +    public <R, T> R accept(ILangVisitor<R, T> visitor, T arg) throws AsterixException {
 +        return visitor.visit(this, arg);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/CallExpr.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/CallExpr.java
index 8cff0e8,0000000..c7d48b6
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/CallExpr.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/CallExpr.java
@@@ -1,64 -1,0 +1,68 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.common.expression;
 +
 +import java.util.List;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.common.functions.FunctionSignature;
 +import org.apache.asterix.lang.common.base.AbstractExpression;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 +
 +public class CallExpr extends AbstractExpression {
-     private final FunctionSignature functionSignature;
++    private FunctionSignature functionSignature;
 +    private List<Expression> exprList;
 +    private boolean isBuiltin;
 +
 +    public CallExpr(FunctionSignature functionSignature, List<Expression> exprList) {
 +        this.functionSignature = functionSignature;
 +        this.exprList = exprList;
 +    }
 +
 +    public FunctionSignature getFunctionSignature() {
 +        return functionSignature;
 +    }
 +
 +    public List<Expression> getExprList() {
 +        return exprList;
 +    }
 +
 +    public boolean isBuiltin() {
 +        return isBuiltin;
 +    }
 +
 +    @Override
 +    public Kind getKind() {
 +        return Kind.CALL_EXPRESSION;
 +    }
 +
++    public void setFunctionSignature(FunctionSignature functionSignature) {
++        this.functionSignature = functionSignature;
++    }
++
 +    public void setExprList(List<Expression> exprList) {
 +        this.exprList = exprList;
 +    }
 +
 +    @Override
 +    public <R, T> R accept(ILangVisitor<R, T> visitor, T arg) throws AsterixException {
 +        return visitor.visit(this, arg);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/OperatorExpr.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/OperatorExpr.java
index da7f59e,0000000..65422b2
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/OperatorExpr.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/expression/OperatorExpr.java
@@@ -1,144 -1,0 +1,147 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.common.expression;
 +
 +import java.util.ArrayList;
 +import java.util.List;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.AbstractExpression;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.struct.OperatorType;
 +import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 +
 +public class OperatorExpr extends AbstractExpression {
 +    private List<Expression> exprList;
 +    private List<OperatorType> opList;
 +    private List<Integer> exprBroadcastIdx;
 +    private boolean currentop = false;
 +
 +    public OperatorExpr() {
 +        super();
 +        exprList = new ArrayList<Expression>();
 +        exprBroadcastIdx = new ArrayList<Integer>();
 +        opList = new ArrayList<OperatorType>();
 +    }
 +
-     public OperatorExpr(List<Expression> exprList, List<Integer> exprBroadcastIdx, List<OperatorType> opList) {
++    public OperatorExpr(List<Expression> exprList, List<Integer> exprBroadcastIdx, List<OperatorType> opList,
++            boolean currentop) {
 +        this.exprList = exprList;
 +        this.exprBroadcastIdx = exprBroadcastIdx;
 +        this.opList = opList;
++        this.currentop = currentop;
 +    }
 +
 +    public boolean isCurrentop() {
 +        return currentop;
 +    }
 +
 +    public void setCurrentop(boolean currentop) {
 +        this.currentop = currentop;
 +    }
 +
 +    public List<Expression> getExprList() {
 +        return exprList;
 +    }
 +
 +    public List<Integer> getExprBroadcastIdx() {
 +        return exprBroadcastIdx;
 +    }
 +
 +    public List<OperatorType> getOpList() {
 +        return opList;
 +    }
 +
 +    public void setExprList(List<Expression> exprList) {
 +        this.exprList = exprList;
 +    }
 +
 +    public void addOperand(Expression operand) {
 +        addOperand(operand, false);
 +    }
 +
 +    public void addOperand(Expression operand, boolean broadcast) {
 +        if (broadcast) {
 +            exprBroadcastIdx.add(exprList.size());
 +        }
 +        exprList.add(operand);
 +    }
 +
 +    public final static boolean opIsComparison(OperatorType t) {
 +        return t == OperatorType.EQ || t == OperatorType.NEQ || t == OperatorType.GT || t == OperatorType.GE
 +                || t == OperatorType.LT || t == OperatorType.LE;
 +    }
 +
 +    public void addOperator(String strOp) {
-         if ("or".equals(strOp))
++        if ("or".equals(strOp)) {
 +            opList.add(OperatorType.OR);
-         else if ("and".equals(strOp))
++        } else if ("and".equals(strOp)) {
 +            opList.add(OperatorType.AND);
-         else if ("<".equals(strOp))
++        } else if ("<".equals(strOp)) {
 +            opList.add(OperatorType.LT);
-         else if (">".equals(strOp))
++        } else if (">".equals(strOp)) {
 +            opList.add(OperatorType.GT);
-         else if ("<=".equals(strOp))
++        } else if ("<=".equals(strOp)) {
 +            opList.add(OperatorType.LE);
-         else if ("<=".equals(strOp))
++        } else if ("<=".equals(strOp)) {
 +            opList.add(OperatorType.LE);
-         else if (">=".equals(strOp))
++        } else if (">=".equals(strOp)) {
 +            opList.add(OperatorType.GE);
-         else if ("=".equals(strOp))
++        } else if ("=".equals(strOp)) {
 +            opList.add(OperatorType.EQ);
-         else if ("!=".equals(strOp))
++        } else if ("!=".equals(strOp)) {
 +            opList.add(OperatorType.NEQ);
-         else if ("+".equals(strOp))
++        } else if ("+".equals(strOp)) {
 +            opList.add(OperatorType.PLUS);
-         else if ("-".equals(strOp))
++        } else if ("-".equals(strOp)) {
 +            opList.add(OperatorType.MINUS);
-         else if ("*".equals(strOp))
++        } else if ("*".equals(strOp)) {
 +            opList.add(OperatorType.MUL);
-         else if ("/".equals(strOp))
++        } else if ("/".equals(strOp)) {
 +            opList.add(OperatorType.DIV);
-         else if ("%".equals(strOp))
++        } else if ("%".equals(strOp)) {
 +            opList.add(OperatorType.MOD);
-         else if ("^".equals(strOp))
++        } else if ("^".equals(strOp)) {
 +            opList.add(OperatorType.CARET);
-         else if ("idiv".equals(strOp))
++        } else if ("idiv".equals(strOp)) {
 +            opList.add(OperatorType.IDIV);
-         else if ("~=".equals(strOp))
++        } else if ("~=".equals(strOp)) {
 +            opList.add(OperatorType.FUZZY_EQ);
++        }
 +    }
 +
 +    @Override
 +    public Kind getKind() {
 +        return Kind.OP_EXPRESSION;
 +    }
 +
 +    @Override
 +    public <R, T> R accept(ILangVisitor<R, T> visitor, T arg) throws AsterixException {
 +        return visitor.visit(this, arg);
 +    }
 +
 +    public boolean isBroadcastOperand(int idx) {
 +        for (Integer i : exprBroadcastIdx) {
 +            if (i == idx) {
 +                return true;
 +            }
 +        }
 +        return false;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java
index ac7698d,0000000..64ca0c1
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java
@@@ -1,85 -1,0 +1,97 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.common.statement;
 +
 +import java.util.ArrayList;
 +import java.util.List;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.base.Statement;
 +import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 +
 +public class Query implements Statement {
 +    private boolean topLevel = true;
 +    private Expression body;
 +    private int varCounter;
 +    private List<String> dataverses = new ArrayList<String>();
 +    private List<String> datasets = new ArrayList<String>();
 +
++    public Query() {
++
++    }
++
++    public Query(boolean topLevel, Expression body, int varCounter, List<String> dataverses, List<String> datasets) {
++        this.topLevel = topLevel;
++        this.body = body;
++        this.varCounter = varCounter;
++        this.dataverses.addAll(dataverses);
++        this.datasets.addAll(datasets);
++    }
++
 +    public Expression getBody() {
 +        return body;
 +    }
 +
 +    public void setBody(Expression body) {
 +        this.body = body;
 +    }
 +
 +    public int getVarCounter() {
 +        return varCounter;
 +    }
 +
 +    public void setVarCounter(int varCounter) {
 +        this.varCounter = varCounter;
 +    }
 +
 +    public void setTopLevel(boolean topLevel) {
 +        this.topLevel = topLevel;
 +    }
 +
 +    public boolean isTopLevel() {
 +        return topLevel;
 +    }
 +
 +    @Override
 +    public Kind getKind() {
 +        return Kind.QUERY;
 +    }
 +
 +    @Override
 +    public <R, T> R accept(ILangVisitor<R, T> visitor, T arg) throws AsterixException {
 +        return visitor.visit(this, arg);
 +    }
 +
 +    public void setDataverses(List<String> dataverses) {
 +        this.dataverses = dataverses;
 +    }
 +
 +    public void setDatasets(List<String> datasets) {
 +        this.datasets = datasets;
 +    }
 +
 +    public List<String> getDataverses() {
 +        return dataverses;
 +    }
 +
 +    public List<String> getDatasets() {
 +        return datasets;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/CloneAndSubstituteVariablesVisitor.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/CloneAndSubstituteVariablesVisitor.java
index ebe266e,0000000..db55c0e
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/CloneAndSubstituteVariablesVisitor.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/visitor/CloneAndSubstituteVariablesVisitor.java
@@@ -1,342 -1,0 +1,342 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.common.visitor;
 +
 +import java.util.ArrayList;
 +import java.util.LinkedList;
 +import java.util.List;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.base.Expression.Kind;
 +import org.apache.asterix.lang.common.base.ILangExpression;
 +import org.apache.asterix.lang.common.clause.GroupbyClause;
 +import org.apache.asterix.lang.common.clause.LetClause;
 +import org.apache.asterix.lang.common.clause.LimitClause;
 +import org.apache.asterix.lang.common.clause.OrderbyClause;
 +import org.apache.asterix.lang.common.clause.WhereClause;
 +import org.apache.asterix.lang.common.expression.CallExpr;
 +import org.apache.asterix.lang.common.expression.FieldAccessor;
 +import org.apache.asterix.lang.common.expression.FieldBinding;
 +import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
 +import org.apache.asterix.lang.common.expression.IfExpr;
 +import org.apache.asterix.lang.common.expression.IndexAccessor;
 +import org.apache.asterix.lang.common.expression.ListConstructor;
 +import org.apache.asterix.lang.common.expression.LiteralExpr;
 +import org.apache.asterix.lang.common.expression.OperatorExpr;
 +import org.apache.asterix.lang.common.expression.QuantifiedExpression;
 +import org.apache.asterix.lang.common.expression.RecordConstructor;
 +import org.apache.asterix.lang.common.expression.UnaryExpr;
 +import org.apache.asterix.lang.common.expression.VariableExpr;
 +import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
 +import org.apache.asterix.lang.common.rewrites.VariableSubstitutionEnvironment;
 +import org.apache.asterix.lang.common.statement.FunctionDecl;
 +import org.apache.asterix.lang.common.statement.Query;
 +import org.apache.asterix.lang.common.struct.Identifier;
 +import org.apache.asterix.lang.common.struct.QuantifiedPair;
 +import org.apache.asterix.lang.common.struct.VarIdentifier;
 +import org.apache.asterix.lang.common.util.VariableCloneAndSubstitutionUtil;
 +import org.apache.asterix.lang.common.visitor.base.AbstractQueryExpressionVisitor;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +
 +public class CloneAndSubstituteVariablesVisitor extends
 +        AbstractQueryExpressionVisitor<Pair<ILangExpression, VariableSubstitutionEnvironment>, VariableSubstitutionEnvironment> {
 +
 +    private LangRewritingContext context;
 +
 +    public CloneAndSubstituteVariablesVisitor(LangRewritingContext context) {
 +        this.context = context;
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(LetClause lc,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = lc.getBindingExpr().accept(this, env);
 +        VariableExpr varExpr = lc.getVarExpr();
 +        VariableExpr newVe = generateNewVariable(context, varExpr);
 +        LetClause newLet = new LetClause(newVe, (Expression) p1.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newLet,
 +                VariableCloneAndSubstitutionUtil.eliminateSubstFromList(lc.getVarExpr(), env));
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(GroupbyClause gc,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        VariableSubstitutionEnvironment newSubs = env;
 +        List<GbyVariableExpressionPair> newGbyList = VariableCloneAndSubstitutionUtil.substInVarExprPair(context,
 +                gc.getGbyPairList(), env, newSubs, this);
 +        List<GbyVariableExpressionPair> newDecorList = gc.hasDecorList() ? VariableCloneAndSubstitutionUtil
 +                .substInVarExprPair(context, gc.getDecorPairList(), env, newSubs, this)
 +                : new ArrayList<GbyVariableExpressionPair>();
 +
 +        VariableExpr newGroupVar = null;
 +        if (gc.hasGroupVar()) {
 +            newGroupVar = generateNewVariable(context, gc.getGroupVar());
 +        }
 +        List<VariableExpr> wList = new LinkedList<VariableExpr>();
 +        if (gc.hasWithList()) {
 +            for (VariableExpr w : gc.getWithVarList()) {
 +                VarIdentifier newVar = context.getRewrittenVar(w.getVar().getId());
 +                if (newVar == null) {
 +                    throw new AsterixException("Could not find a rewritten variable identifier for " + w);
 +                }
 +                VariableExpr newWithVar = new VariableExpr(newVar);
 +                wList.add(newWithVar);
 +            }
 +        }
 +        List<Pair<Expression, Identifier>> newGroupFieldList = new ArrayList<>();
 +        if (gc.hasGroupFieldList()) {
 +            for (Pair<Expression, Identifier> varId : gc.getGroupFieldList()) {
 +                Expression newExpr = (Expression) varId.first.accept(this, env).first;
 +                newGroupFieldList.add(new Pair<Expression, Identifier>(newExpr, varId.second));
 +            }
 +        }
 +        GroupbyClause newGroup = new GroupbyClause(newGbyList, newDecorList, wList, newGroupVar, newGroupFieldList,
-                 gc.hasHashGroupByHint());
++                gc.hasHashGroupByHint(), gc.isGroupAll());
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newGroup, newSubs);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(QuantifiedExpression qe,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        List<QuantifiedPair> oldPairs = qe.getQuantifiedList();
 +        List<QuantifiedPair> newPairs = new ArrayList<QuantifiedPair>(oldPairs.size());
 +        VariableSubstitutionEnvironment newSubs = env;
 +        for (QuantifiedPair t : oldPairs) {
 +            VariableExpr newVar = generateNewVariable(context, t.getVarExpr());
 +            newSubs = VariableCloneAndSubstitutionUtil.eliminateSubstFromList(newVar, newSubs);
 +            Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = visitUnnesBindingExpression(t.getExpr(),
 +                    newSubs);
 +            QuantifiedPair t2 = new QuantifiedPair(newVar, (Expression) p1.first);
 +            newPairs.add(t2);
 +        }
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p2 = qe.getSatisfiesExpr().accept(this, newSubs);
 +        QuantifiedExpression qe2 = new QuantifiedExpression(qe.getQuantifier(), newPairs, (Expression) p2.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(qe2, newSubs);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(WhereClause wc,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = wc.getWhereExpr().accept(this, env);
 +        WhereClause newW = new WhereClause((Expression) p1.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newW, p1.second);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(CallExpr pf,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        List<Expression> exprList = VariableCloneAndSubstitutionUtil.visitAndCloneExprList(pf.getExprList(), env, this);
 +        CallExpr f = new CallExpr(pf.getFunctionSignature(), exprList);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(f, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(FunctionDecl fd,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        List<VarIdentifier> newList = new ArrayList<VarIdentifier>(fd.getParamList().size());
 +        for (VarIdentifier vi : fd.getParamList()) {
 +            VariableExpr varExpr = new VariableExpr(vi);
 +            if (!env.constainsOldVar(varExpr)) {
 +                throw new AsterixException("Parameter " + vi + " does not appear in the substitution list.");
 +            }
 +            Expression newExpr = env.findSubstituion(varExpr);
 +            if (newExpr.getKind() != Kind.VARIABLE_EXPRESSION) {
 +                throw new AsterixException("Parameter " + vi + " cannot be substituted by a non-variable expression.");
 +            }
 +            newList.add(((VariableExpr) newExpr).getVar());
 +        }
 +
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = fd.getFuncBody().accept(this, env);
 +        FunctionDecl newF = new FunctionDecl(fd.getSignature(), newList, (Expression) p1.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newF, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(IfExpr ifexpr,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = ifexpr.getCondExpr().accept(this, env);
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p2 = ifexpr.getThenExpr().accept(this, env);
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p3 = ifexpr.getElseExpr().accept(this, env);
 +        IfExpr i = new IfExpr((Expression) p1.first, (Expression) p2.first, (Expression) p3.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(i, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(LimitClause lc,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = lc.getLimitExpr().accept(this, env);
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p2 = null;
 +        Expression lcOffsetExpr = lc.getOffset();
 +        if (lcOffsetExpr != null) {
 +            p2 = lcOffsetExpr.accept(this, env);
 +        } else {
 +            p2 = new Pair<ILangExpression, VariableSubstitutionEnvironment>(null, null);
 +        }
 +        LimitClause c = new LimitClause((Expression) p1.first, (Expression) p2.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(c, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(ListConstructor lc,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        List<Expression> oldExprList = lc.getExprList();
 +        List<Expression> exprs = VariableCloneAndSubstitutionUtil.visitAndCloneExprList(oldExprList, env, this);
 +        ListConstructor c = new ListConstructor(lc.getType(), exprs);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(c, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(LiteralExpr l,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(l, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(OperatorExpr op,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        List<Expression> oldExprList = op.getExprList();
 +        List<Expression> exprs = new ArrayList<Expression>(oldExprList.size());
 +        for (Expression e : oldExprList) {
 +            Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = e.accept(this, env);
 +            exprs.add((Expression) p1.first);
 +        }
-         OperatorExpr oe = new OperatorExpr(exprs, op.getExprBroadcastIdx(), op.getOpList());
-         oe.setCurrentop(op.isCurrentop());
++        OperatorExpr oe = new OperatorExpr(exprs, op.getExprBroadcastIdx(), op.getOpList(), op.isCurrentop());
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(oe, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(OrderbyClause oc,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        List<Expression> exprList = VariableCloneAndSubstitutionUtil.visitAndCloneExprList(oc.getOrderbyList(), env,
 +                this);
 +        OrderbyClause oc2 = new OrderbyClause(exprList, oc.getModifierList());
 +        oc2.setNumFrames(oc.getNumFrames());
 +        oc2.setNumTuples(oc.getNumTuples());
 +        oc2.setRangeMap(oc.getRangeMap());
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(oc2, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(Query q, VariableSubstitutionEnvironment env)
 +            throws AsterixException {
 +        Query newQ = new Query();
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = q.getBody().accept(this, env);
 +        newQ.setBody((Expression) p1.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newQ, p1.second);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(RecordConstructor rc,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        List<FieldBinding> oldFbs = rc.getFbList();
 +        ArrayList<FieldBinding> newFbs = new ArrayList<FieldBinding>(oldFbs.size());
 +        for (FieldBinding fb : oldFbs) {
 +            Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = fb.getLeftExpr().accept(this, env);
 +            Pair<ILangExpression, VariableSubstitutionEnvironment> p2 = fb.getRightExpr().accept(this, env);
 +            FieldBinding fb2 = new FieldBinding((Expression) p1.first, (Expression) p2.first);
 +            newFbs.add(fb2);
 +        }
 +        RecordConstructor newRc = new RecordConstructor(newFbs);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newRc, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(UnaryExpr u,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = u.getExpr().accept(this, env);
 +        UnaryExpr newU = new UnaryExpr(u.getSign(), (Expression) p1.first);
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newU, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(IndexAccessor ia,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p1 = ia.getExpr().accept(this, env);
 +        Expression indexExpr = null;
 +        if (!ia.isAny()) {
 +            Pair<ILangExpression, VariableSubstitutionEnvironment> p2 = ia.getIndexExpr().accept(this, env);
 +            indexExpr = (Expression) p2.first;
 +        }
 +        IndexAccessor i = new IndexAccessor((Expression) p1.first, indexExpr);
 +        i.setAny(ia.isAny());
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(i, env);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(FieldAccessor fa,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        Pair<ILangExpression, VariableSubstitutionEnvironment> p = fa.getExpr().accept(this, env);
 +        FieldAccessor newF = new FieldAccessor((Expression) p.first, fa.getIdent());
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(newF, p.second);
 +    }
 +
 +    @Override
 +    public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(VariableExpr v,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        return new Pair<ILangExpression, VariableSubstitutionEnvironment>(rewriteVariableExpr(v, env), env);
 +    }
 +
 +    // Replace a variable expression if the variable is to-be substituted.
-     protected Expression rewriteVariableExpr(VariableExpr expr, VariableSubstitutionEnvironment env) {
++    protected Expression rewriteVariableExpr(VariableExpr expr, VariableSubstitutionEnvironment env)
++            throws AsterixException {
 +        if (env.constainsOldVar(expr)) {
 +            return env.findSubstituion(expr);
 +        } else {
 +            // it is a variable from the context
 +            VarIdentifier var = context.getRewrittenVar(expr.getVar().getId());
 +            if (var != null) {
 +                return new VariableExpr(var);
 +            }
 +        }
 +        return expr;
 +    }
 +
 +    /**
 +     * Generates a new variable for an existing variable.
 +     *
 +     * @param context
 +     *            , the language rewriting context which keeps all the rewriting variable-int-id to variable-string-identifier mappings.
 +     * @param varExpr
 +     *            , the existing variable expression.
 +     * @return the new variable expression.
 +     */
 +    public VariableExpr generateNewVariable(LangRewritingContext context, VariableExpr varExpr) {
 +        VarIdentifier vi = varExpr.getVar();
 +        VarIdentifier newVar = context.mapOldId(vi.getId(), vi.getValue());
 +        VariableExpr newVarExpr = new VariableExpr(newVar);
 +        return newVarExpr;
 +    }
 +
 +    /**
 +     * Visits an expression that is used for unnest binding.
 +     *
 +     * @param expr,
 +     *            the expression to consider.
 +     * @param env,
 +     *            the variable substitution environment.
 +     * @return a pair of an ILangExpression and a variable substitution environment.
 +     * @throws AsterixException
 +     */
 +    protected Pair<ILangExpression, VariableSubstitutionEnvironment> visitUnnesBindingExpression(Expression expr,
 +            VariableSubstitutionEnvironment env) throws AsterixException {
 +        return expr.accept(this, env);
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/clause/SelectBlock.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/clause/SelectBlock.java
index 646e150,0000000..f63eced
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/clause/SelectBlock.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/clause/SelectBlock.java
@@@ -1,115 -1,0 +1,119 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.asterix.lang.sqlpp.clause;
 +
 +import java.util.List;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.Clause;
 +import org.apache.asterix.lang.common.clause.GroupbyClause;
 +import org.apache.asterix.lang.common.clause.LetClause;
 +import org.apache.asterix.lang.common.clause.WhereClause;
 +import org.apache.asterix.lang.common.visitor.base.ILangVisitor;
 +import org.apache.asterix.lang.sqlpp.visitor.base.ISqlppVisitor;
 +
 +public class SelectBlock implements Clause {
 +
 +    private SelectClause selectClause;
 +    private FromClause fromClause;
 +    private List<LetClause> letClauses;
 +    private WhereClause whereClause;
 +    private GroupbyClause groupbyClause;
 +    private List<LetClause> letClausesAfterGby;
 +    private HavingClause havingClause;
 +
 +    public SelectBlock(SelectClause selectClause, FromClause fromClause, List<LetClause> letClauses,
 +            WhereClause whereClause, GroupbyClause groupbyClause, List<LetClause> letClausesAfterGby,
 +            HavingClause havingClause) {
 +        this.selectClause = selectClause;
 +        this.fromClause = fromClause;
 +        this.letClauses = letClauses;
 +        this.whereClause = whereClause;
 +        this.groupbyClause = groupbyClause;
 +        this.havingClause = havingClause;
 +        this.letClausesAfterGby = letClausesAfterGby;
 +    }
 +
 +    @Override
 +    public <R, T> R accept(ILangVisitor<R, T> visitor, T arg) throws AsterixException {
 +        return ((ISqlppVisitor<R, T>) visitor).visit(this, arg);
 +    }
 +
 +    @Override
 +    public ClauseType getClauseType() {
 +        return ClauseType.SELECT_BLOCK;
 +    }
 +
 +    public SelectClause getSelectClause() {
 +        return selectClause;
 +    }
 +
 +    public FromClause getFromClause() {
 +        return fromClause;
 +    }
 +
 +    public List<LetClause> getLetList() {
 +        return letClauses;
 +    }
 +
 +    public WhereClause getWhereClause() {
 +        return whereClause;
 +    }
 +
 +    public GroupbyClause getGroupbyClause() {
 +        return groupbyClause;
 +    }
 +
 +    public HavingClause getHavingClause() {
 +        return havingClause;
 +    }
 +
 +    public boolean hasFromClause() {
 +        return fromClause != null;
 +    }
 +
 +    public boolean hasLetClauses() {
 +        return letClauses != null && letClauses.size() > 0;
 +    }
 +
 +    public boolean hasWhereClause() {
 +        return whereClause != null;
 +    }
 +
 +    public boolean hasGroupbyClause() {
 +        return groupbyClause != null;
 +    }
 +
 +    public boolean hasLetClausesAfterGroupby() {
 +        return letClausesAfterGby != null && letClausesAfterGby.size() > 0;
 +    }
 +
 +    public List<LetClause> getLetListAfterGroupby() {
 +        return letClausesAfterGby;
 +    }
 +
 +    public boolean hasHavingClause() {
 +        return havingClause != null;
 +    }
++
++    public void setGroupbyClause(GroupbyClause groupbyClause) {
++        this.groupbyClause = groupbyClause;
++    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java
index 1f119ae,0000000..01b9b54
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java
@@@ -1,50 -1,0 +1,53 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.sqlpp.rewrites;
 +
 +import java.util.List;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
 +import org.apache.asterix.lang.common.statement.FunctionDecl;
 +import org.apache.asterix.lang.common.statement.Query;
 +import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +
 +class SqlppFunctionBodyRewriter extends SqlppQueryRewriter {
 +
 +    @Override
 +    public void rewrite(List<FunctionDecl> declaredFunctions, Query topExpr, AqlMetadataProvider metadataProvider,
 +            LangRewritingContext context) throws AsterixException {
 +        // Sets up parameters.
 +        setup(declaredFunctions, topExpr, metadataProvider, context);
 +
 +        // Inlines column aliases.
 +        inlineColumnAlias();
 +
++        // Rewrites SQL-92 global aggregations.
++        rewriteGlobalAggregations();
++
 +        // Group-by core/sugar rewrites.
 +        rewriteGroupBys();
 +
 +        // Generates ids for variables (considering scopes) but DOES NOT replace unbound variable access with the dataset function.
 +        // An unbound variable within a function could be a bound variable in the top-level query.
 +        variableCheckAndRewrite(false);
 +
 +        // Inlines functions recursively.
 +        inlineDeclaredUdfs();
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java
index a2c84ba,0000000..f9a7183
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppQueryRewriter.java
@@@ -1,340 -1,0 +1,369 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.sqlpp.rewrites;
 +
 +import java.util.ArrayList;
 +import java.util.List;
 +import java.util.Set;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.common.functions.FunctionSignature;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.base.IQueryRewriter;
 +import org.apache.asterix.lang.common.clause.LetClause;
 +import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
 +import org.apache.asterix.lang.common.statement.FunctionDecl;
 +import org.apache.asterix.lang.common.statement.Query;
 +import org.apache.asterix.lang.common.util.FunctionUtil;
 +import org.apache.asterix.lang.common.visitor.GatherFunctionCallsVisitor;
 +import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
 +import org.apache.asterix.lang.sqlpp.clause.FromClause;
 +import org.apache.asterix.lang.sqlpp.clause.FromTerm;
 +import org.apache.asterix.lang.sqlpp.clause.HavingClause;
 +import org.apache.asterix.lang.sqlpp.clause.JoinClause;
 +import org.apache.asterix.lang.sqlpp.clause.NestClause;
 +import org.apache.asterix.lang.sqlpp.clause.Projection;
 +import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
 +import org.apache.asterix.lang.sqlpp.clause.SelectClause;
 +import org.apache.asterix.lang.sqlpp.clause.SelectElement;
 +import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
 +import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
 +import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
 +import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
 +import org.apache.asterix.lang.sqlpp.parser.FunctionParser;
 +import org.apache.asterix.lang.sqlpp.parser.SqlppParserFactory;
++import org.apache.asterix.lang.sqlpp.rewrites.visitor.InlineColumnAliasVisitor;
++import org.apache.asterix.lang.sqlpp.rewrites.visitor.SqlppBuiltinFunctionRewriteVisitor;
++import org.apache.asterix.lang.sqlpp.rewrites.visitor.SqlppGlobalAggregationSugarVisitor;
++import org.apache.asterix.lang.sqlpp.rewrites.visitor.SqlppGroupByVisitor;
++import org.apache.asterix.lang.sqlpp.rewrites.visitor.SqlppInlineUdfsVisitor;
++import org.apache.asterix.lang.sqlpp.rewrites.visitor.VariableCheckAndRewriteVisitor;
 +import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
- import org.apache.asterix.lang.sqlpp.visitor.InlineColumnAliasVisitor;
- import org.apache.asterix.lang.sqlpp.visitor.SqlppGroupByVisitor;
- import org.apache.asterix.lang.sqlpp.visitor.SqlppInlineUdfsVisitor;
- import org.apache.asterix.lang.sqlpp.visitor.VariableCheckAndRewriteVisitor;
++import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
 +import org.apache.asterix.lang.sqlpp.visitor.base.ISqlppVisitor;
 +import org.apache.asterix.metadata.MetadataManager;
 +import org.apache.asterix.metadata.MetadataTransactionContext;
 +import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +import org.apache.asterix.metadata.entities.Function;
 +import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
 +
 +class SqlppQueryRewriter implements IQueryRewriter {
 +    private final FunctionParser functionRepository = new FunctionParser(new SqlppParserFactory());
 +    private Query topExpr;
 +    private List<FunctionDecl> declaredFunctions;
 +    private LangRewritingContext context;
 +    private MetadataTransactionContext mdTxnCtx;
 +    private AqlMetadataProvider metadataProvider;
 +
 +    protected void setup(List<FunctionDecl> declaredFunctions, Query topExpr, AqlMetadataProvider metadataProvider,
 +            LangRewritingContext context) {
 +        this.topExpr = topExpr;
 +        this.context = context;
 +        this.declaredFunctions = declaredFunctions;
 +        this.mdTxnCtx = metadataProvider.getMetadataTxnContext();
 +        this.metadataProvider = metadataProvider;
 +    }
 +
 +    @Override
 +    public void rewrite(List<FunctionDecl> declaredFunctions, Query topExpr, AqlMetadataProvider metadataProvider,
 +            LangRewritingContext context) throws AsterixException {
 +        // Sets up parameters.
 +        setup(declaredFunctions, topExpr, metadataProvider, context);
 +
 +        // Inlines column aliases.
 +        inlineColumnAlias();
 +
++        // Rewrites SQL-92 global aggregations.
++        rewriteGlobalAggregations();
++
 +        // Group-by core/sugar rewrites.
 +        rewriteGroupBys();
 +
 +        // Generates ids for variables (considering scopes) and replaces global variable access with the dataset function.
 +        variableCheckAndRewrite(true);
 +
 +        // Inlines functions.
 +        inlineDeclaredUdfs();
 +
++        // Rewrites function names.
++        // This should be done after inlineDeclaredUdfs() because user-defined function
++        // names could be case sensitive.
++        rewriteFunctionNames();
++
 +        // Replace global variable access with the dataset function for inlined expressions.
 +        variableCheckAndRewrite(true);
 +
 +        // Sets the var counter of the query.
 +        topExpr.setVarCounter(context.getVarCounter());
 +    }
 +
++    protected void rewriteGlobalAggregations() throws AsterixException {
++        if (topExpr == null) {
++            return;
++        }
++        SqlppGlobalAggregationSugarVisitor globalAggregationVisitor = new SqlppGlobalAggregationSugarVisitor();
++        globalAggregationVisitor.visit(topExpr, null);
++    }
++
++    protected void rewriteFunctionNames() throws AsterixException {
++        if (topExpr == null) {
++            return;
++        }
++        SqlppBuiltinFunctionRewriteVisitor functionNameMapVisitor = new SqlppBuiltinFunctionRewriteVisitor();
++        functionNameMapVisitor.visit(topExpr, null);
++    }
++
 +    protected void inlineColumnAlias() throws AsterixException {
 +        if (topExpr == null) {
 +            return;
 +        }
 +        // Inline column aliases.
 +        InlineColumnAliasVisitor inlineColumnAliasVisitor = new InlineColumnAliasVisitor(context);
 +        inlineColumnAliasVisitor.visit(topExpr, false);
 +    }
 +
 +    protected void variableCheckAndRewrite(boolean overwrite) throws AsterixException {
 +        if (topExpr == null) {
 +            return;
 +        }
 +        VariableCheckAndRewriteVisitor variableCheckAndRewriteVisitor = new VariableCheckAndRewriteVisitor(context,
 +                overwrite, metadataProvider);
 +        variableCheckAndRewriteVisitor.visit(topExpr, null);
 +    }
 +
 +    protected void rewriteGroupBys() throws AsterixException {
 +        if (topExpr == null) {
 +            return;
 +        }
-         SqlppGroupByVisitor groupByVisitor = new SqlppGroupByVisitor(context, metadataProvider);
++        SqlppGroupByVisitor groupByVisitor = new SqlppGroupByVisitor(context);
 +        groupByVisitor.visit(topExpr, null);
 +    }
 +
 +    protected void inlineDeclaredUdfs() throws AsterixException {
 +        if (topExpr == null) {
 +            return;
 +        }
 +        List<FunctionSignature> funIds = new ArrayList<FunctionSignature>();
 +        for (FunctionDecl fdecl : declaredFunctions) {
 +            funIds.add(fdecl.getSignature());
 +        }
 +
 +        List<FunctionDecl> otherFDecls = new ArrayList<FunctionDecl>();
 +        buildOtherUdfs(topExpr.getBody(), otherFDecls, funIds);
 +        declaredFunctions.addAll(otherFDecls);
 +        if (!declaredFunctions.isEmpty()) {
 +            SqlppInlineUdfsVisitor visitor = new SqlppInlineUdfsVisitor(context,
 +                    new SqlppFunctionBodyRewriterFactory() /* the rewriter for function bodies expressions*/,
 +                    declaredFunctions, metadataProvider);
 +            while (topExpr.accept(visitor, declaredFunctions)) {
 +                // loop until no more changes
 +            }
 +        }
 +        declaredFunctions.removeAll(otherFDecls);
 +    }
 +
 +    protected void buildOtherUdfs(Expression expression, List<FunctionDecl> functionDecls,
 +            List<FunctionSignature> declaredFunctions) throws AsterixException {
 +        if (expression == null) {
 +            return;
 +        }
 +        String value = metadataProvider.getConfig().get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS);
 +        boolean includePrivateFunctions = (value != null) ? Boolean.valueOf(value.toLowerCase()) : false;
 +        Set<FunctionSignature> functionCalls = getFunctionCalls(expression);
 +        for (FunctionSignature signature : functionCalls) {
 +
 +            if (declaredFunctions != null && declaredFunctions.contains(signature)) {
 +                continue;
 +            }
 +
 +            Function function = lookupUserDefinedFunctionDecl(signature);
 +            if (function == null) {
-                 if (AsterixBuiltinFunctions.isBuiltinCompilerFunction(signature, includePrivateFunctions)) {
++                FunctionSignature normalizedSignature = FunctionMapUtil.normalizeBuiltinFunctionSignature(signature,
++                        false);
++                if (AsterixBuiltinFunctions.isBuiltinCompilerFunction(normalizedSignature, includePrivateFunctions)) {
 +                    continue;
 +                }
 +                StringBuilder messageBuilder = new StringBuilder();
 +                if (functionDecls.size() > 0) {
 +                    messageBuilder.append(" function " + functionDecls.get(functionDecls.size() - 1).getSignature()
 +                            + " depends upon function " + signature + " which is undefined");
 +                } else {
 +                    messageBuilder.append(" function " + signature + " is undefined ");
 +                }
 +                throw new AsterixException(messageBuilder.toString());
 +            }
 +
 +            if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
 +                FunctionDecl functionDecl = functionRepository.getFunctionDecl(function);
 +                if (functionDecl != null) {
 +                    if (functionDecls.contains(functionDecl)) {
 +                        throw new AsterixException("ERROR:Recursive invocation "
 +                                + functionDecls.get(functionDecls.size() - 1).getSignature() + " <==> "
 +                                + functionDecl.getSignature());
 +                    }
 +                    functionDecls.add(functionDecl);
 +                    buildOtherUdfs(functionDecl.getFuncBody(), functionDecls, declaredFunctions);
 +                }
 +            }
 +        }
 +
 +    }
 +
 +    private Function lookupUserDefinedFunctionDecl(FunctionSignature signature) throws AsterixException {
 +        if (signature.getNamespace() == null) {
 +            return null;
 +        }
 +        return MetadataManager.INSTANCE.getFunction(mdTxnCtx, signature);
 +    }
 +
 +    private Set<FunctionSignature> getFunctionCalls(Expression expression) throws AsterixException {
 +        GatherFunctionCalls gfc = new GatherFunctionCalls();
 +        expression.accept(gfc, null);
 +        return gfc.getCalls();
 +    }
 +
 +    private static class GatherFunctionCalls extends GatherFunctionCallsVisitor implements ISqlppVisitor<Void, Void> {
 +
 +        public GatherFunctionCalls() {
 +        }
 +
 +        @Override
 +        public Void visit(FromClause fromClause, Void arg) throws AsterixException {
 +            for (FromTerm fromTerm : fromClause.getFromTerms()) {
 +                fromTerm.accept(this, arg);
 +            }
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(FromTerm fromTerm, Void arg) throws AsterixException {
 +            fromTerm.getLeftExpression().accept(this, arg);
 +            for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
 +                correlateClause.accept(this, arg);
 +            }
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(JoinClause joinClause, Void arg) throws AsterixException {
 +            joinClause.getRightExpression().accept(this, arg);
 +            joinClause.getConditionExpression().accept(this, arg);
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(NestClause nestClause, Void arg) throws AsterixException {
 +            nestClause.getRightExpression().accept(this, arg);
 +            nestClause.getConditionExpression().accept(this, arg);
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(Projection projection, Void arg) throws AsterixException {
 +            projection.getExpression().accept(this, arg);
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(SelectBlock selectBlock, Void arg) throws AsterixException {
 +            if (selectBlock.hasFromClause()) {
 +                selectBlock.getFromClause().accept(this, arg);
 +            }
 +            if (selectBlock.hasLetClauses()) {
 +                for (LetClause letClause : selectBlock.getLetList()) {
 +                    letClause.accept(this, arg);
 +                }
 +            }
 +            if (selectBlock.hasWhereClause()) {
 +                selectBlock.getWhereClause().accept(this, arg);
 +            }
 +            if (selectBlock.hasGroupbyClause()) {
 +                selectBlock.getGroupbyClause().accept(this, arg);
 +            }
 +            if (selectBlock.hasLetClausesAfterGroupby()) {
 +                for (LetClause letClause : selectBlock.getLetListAfterGroupby()) {
 +                    letClause.accept(this, arg);
 +                }
 +            }
 +            if (selectBlock.hasHavingClause()) {
 +                selectBlock.getHavingClause().accept(this, arg);
 +            }
 +            selectBlock.getSelectClause().accept(this, arg);
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(SelectClause selectClause, Void arg) throws AsterixException {
 +            if (selectClause.selectElement()) {
 +                selectClause.getSelectElement().accept(this, arg);
 +            } else {
 +                selectClause.getSelectRegular().accept(this, arg);
 +            }
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(SelectElement selectElement, Void arg) throws AsterixException {
 +            selectElement.getExpression().accept(this, arg);
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(SelectRegular selectRegular, Void arg) throws AsterixException {
 +            for (Projection projection : selectRegular.getProjections()) {
 +                projection.accept(this, arg);
 +            }
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(SelectSetOperation selectSetOperation, Void arg) throws AsterixException {
 +            selectSetOperation.getLeftInput().accept(this, arg);
 +            for (SetOperationRight setOperationRight : selectSetOperation.getRightInputs()) {
 +                setOperationRight.getSetOperationRightInput().accept(this, arg);
 +            }
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(SelectExpression selectStatement, Void arg) throws AsterixException {
 +            selectStatement.getSelectSetOperation().accept(this, arg);
 +            if (selectStatement.hasOrderby()) {
 +                selectStatement.getOrderbyClause().accept(this, arg);
 +            }
 +            if (selectStatement.hasLimit()) {
 +                selectStatement.getLimitClause().accept(this, arg);
 +            }
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(UnnestClause unnestClause, Void arg) throws AsterixException {
 +            unnestClause.getRightExpression().accept(this, arg);
 +            return null;
 +        }
 +
 +        @Override
 +        public Void visit(HavingClause havingClause, Void arg) throws AsterixException {
 +            havingClause.getFilterExpression().accept(this, arg);
 +            return null;
 +        }
 +
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppRewriteUtil.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppRewriteUtil.java
index 0f8488a,0000000..6c737d6
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppRewriteUtil.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppRewriteUtil.java
@@@ -1,50 -1,0 +1,59 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.sqlpp.util;
 +
 +import java.util.Collection;
 +import java.util.HashSet;
 +import java.util.Set;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.base.ILangExpression;
 +import org.apache.asterix.lang.common.expression.VariableExpr;
 +import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
- import org.apache.asterix.lang.sqlpp.visitor.SqlppGroupBySugarVisitor;
- import org.apache.asterix.lang.sqlpp.visitor.UsedVariableVisitor;
++import org.apache.asterix.lang.sqlpp.rewrites.visitor.SqlppGroupBySugarVisitor;
++import org.apache.asterix.lang.sqlpp.visitor.DeepCopyVisitor;
++import org.apache.asterix.lang.sqlpp.visitor.FreeVariableVisitor;
 +
 +public class SqlppRewriteUtil {
 +
 +    // Applies the sugar rewriting for group-by.
 +    public static Expression rewriteExpressionUsingGroupVariable(VariableExpr groupVar,
 +            Collection<VariableExpr> targetVarList, ILangExpression expr, LangRewritingContext context)
 +                    throws AsterixException {
-         SqlppGroupBySugarVisitor visitor = new SqlppGroupBySugarVisitor(context, null, groupVar, targetVarList);
++        SqlppGroupBySugarVisitor visitor = new SqlppGroupBySugarVisitor(context, groupVar, targetVarList);
 +        return expr.accept(visitor, null);
 +    }
 +
-     public static Set<VariableExpr> getUsedVariable(Expression expr) throws AsterixException {
++    public static Set<VariableExpr> getFreeVariable(Expression expr) throws AsterixException {
 +        Set<VariableExpr> vars = new HashSet<>();
-         UsedVariableVisitor visitor = new UsedVariableVisitor();
++        FreeVariableVisitor visitor = new FreeVariableVisitor();
 +        expr.accept(visitor, vars);
 +        return vars;
 +    }
 +
++    public static ILangExpression deepCopy(ILangExpression expr) throws AsterixException {
++        if (expr == null) {
++            return expr;
++        }
++        DeepCopyVisitor visitor = new DeepCopyVisitor();
++        return expr.accept(visitor, null);
++    }
++
 +}


[21/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.ast
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.ast
index 592238f,0000000..c8abf8d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.ast
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch-sql-like/q06_forecast_revenue_change/q06_forecast_revenue_change.3.ast
@@@ -1,78 -1,0 +1,86 @@@
 +DataverseUse tpch
 +Query:
- RecordConstructor [
++SELECT [
++FunctionCall tpch.sum@1[
 +  (
-     LiteralExpr [STRING] [revenue]
-     :
-     FunctionCall tpch.sum@1[
-       (
-         SELECT ELEMENT [
-         OperatorExpr [
-           FieldAccessor [
-             Variable [ Name=$l ]
-             Field=l_extendedprice
-           ]
-           *
-           FieldAccessor [
-             Variable [ Name=$l ]
-             Field=l_discount
-           ]
++    SELECT ELEMENT [
++    OperatorExpr [
++      FieldAccessor [
++        FieldAccessor [
++          Variable [ Name=#2 ]
++          Field=l
 +        ]
++        Field=l_extendedprice
++      ]
++      *
++      FieldAccessor [
++        FieldAccessor [
++          Variable [ Name=#2 ]
++          Field=l
 +        ]
-         FROM [          FunctionCall Metadata.dataset@1[
-             LiteralExpr [STRING] [LineItem]
-           ]
-           AS
-           Variable [ Name=$l ]
-         ]
-         Where
-           OperatorExpr [
-             OperatorExpr [
-               FieldAccessor [
-                 Variable [ Name=$l ]
-                 Field=l_shipdate
-               ]
-               >=
-               LiteralExpr [STRING] [1994-01-01]
-             ]
-             and
-             OperatorExpr [
-               FieldAccessor [
-                 Variable [ Name=$l ]
-                 Field=l_shipdate
-               ]
-               <
-               LiteralExpr [STRING] [1995-01-01]
-             ]
-             and
-             OperatorExpr [
-               FieldAccessor [
-                 Variable [ Name=$l ]
-                 Field=l_discount
-               ]
-               >=
-               LiteralExpr [DOUBLE] [0.05]
-             ]
-             and
-             OperatorExpr [
-               FieldAccessor [
-                 Variable [ Name=$l ]
-                 Field=l_discount
-               ]
-               <=
-               LiteralExpr [DOUBLE] [0.07]
-             ]
-             and
-             OperatorExpr [
-               FieldAccessor [
-                 Variable [ Name=$l ]
-                 Field=l_quantity
-               ]
-               <
-               LiteralExpr [LONG] [24]
-             ]
-           ]
-       )
++        Field=l_discount
++      ]
++    ]
++    ]
++    FROM [      Variable [ Name=#1 ]
++      AS
++      Variable [ Name=#2 ]
 +    ]
 +  )
 +]
++revenue
++]
++FROM [  FunctionCall Metadata.dataset@1[
++    LiteralExpr [STRING] [LineItem]
++  ]
++  AS
++  Variable [ Name=$l ]
++]
++Where
++  OperatorExpr [
++    OperatorExpr [
++      FieldAccessor [
++        Variable [ Name=$l ]
++        Field=l_shipdate
++      ]
++      >=
++      LiteralExpr [STRING] [1994-01-01]
++    ]
++    and
++    OperatorExpr [
++      FieldAccessor [
++        Variable [ Name=$l ]
++        Field=l_shipdate
++      ]
++      <
++      LiteralExpr [STRING] [1995-01-01]
++    ]
++    and
++    OperatorExpr [
++      FieldAccessor [
++        Variable [ Name=$l ]
++        Field=l_discount
++      ]
++      >=
++      LiteralExpr [DOUBLE] [0.05]
++    ]
++    and
++    OperatorExpr [
++      FieldAccessor [
++        Variable [ Name=$l ]
++        Field=l_discount
++      ]
++      <=
++      LiteralExpr [DOUBLE] [0.07]
++    ]
++    and
++    OperatorExpr [
++      FieldAccessor [
++        Variable [ Name=$l ]
++        Field=l_quantity
++      ]
++      <
++      LiteralExpr [LONG] [24]
++    ]
++  ]
++Group All


[46/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp
deleted file mode 100644
index 4b757cd..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.2.update.sqlpp
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use TinySocial;
-
-
-load  table FacebookUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/fbu.adm"),("format"="adm"));
-
-load  table FacebookMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/fbm.adm"),("format"="adm"));
-
-load  table TwitterUsers using "localfs" (("path"="asterix_nc1://data/tinysocial/twu.adm"),("format"="adm"));
-
-load  table TweetMessages using "localfs" (("path"="asterix_nc1://data/tinysocial/twm.adm"),("format"="adm"));
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp
deleted file mode 100644
index 9ff9b65..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/global-aggregate/q08/q08.3.query.sqlpp
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-USE TinySocial;
-
-SELECT COLL_COUNT(u."friend-ids") count
-FROM FacebookUsers u
-ORDER BY u.id;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp b/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp
deleted file mode 100644
index 1da2a8a..0000000
--- a/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.query.sqlpp
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-use tpch;
-
-
-select count(l) as count
-from  LineItem as l
-where l.l_shipdate >= '1994-01-01' and l.l_shipdate < '1995-01-01' and l.l_discount >= 0.05
-  and l.l_discount <= 0.07 and l.l_quantity < 24
-;


[08/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
index a629cdf,0000000..90ebfb7
mode 100644,000000..100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
@@@ -1,3017 -1,0 +1,3014 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.metadata.declared;
 +
 +import java.io.File;
 +import java.io.IOException;
 +import java.util.ArrayList;
 +import java.util.Collection;
 +import java.util.Iterator;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.common.config.AsterixStorageProperties;
 +import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 +import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 +import org.apache.asterix.common.config.DatasetConfig.IndexType;
 +import org.apache.asterix.common.config.GlobalConfig;
 +import org.apache.asterix.common.config.MetadataConstants;
 +import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
 +import org.apache.asterix.common.context.ITransactionSubsystemProvider;
 +import org.apache.asterix.common.context.TransactionSubsystemProvider;
 +import org.apache.asterix.common.dataflow.AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor;
 +import org.apache.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
 +import org.apache.asterix.common.dataflow.IAsterixApplicationContextInfo;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
 +import org.apache.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
 +import org.apache.asterix.common.ioopcallbacks.LSMInvertedIndexIOOperationCallbackFactory;
 +import org.apache.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
 +import org.apache.asterix.common.transactions.IRecoveryManager.ResourceType;
 +import org.apache.asterix.common.transactions.JobId;
 +import org.apache.asterix.common.utils.StoragePathUtil;
 +import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
 +import org.apache.asterix.external.adapter.factory.LookupAdapterFactory;
 +import org.apache.asterix.external.api.IAdapterFactory;
 +import org.apache.asterix.external.api.IDataSourceAdapter;
 +import org.apache.asterix.external.feed.api.ICentralFeedManager;
 +import org.apache.asterix.external.feed.management.FeedConnectionId;
 +import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 +import org.apache.asterix.external.feed.watch.FeedActivity;
 +import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
 +import org.apache.asterix.external.indexing.ExternalFile;
 +import org.apache.asterix.external.indexing.IndexingConstants;
 +import org.apache.asterix.external.operators.ExternalBTreeSearchOperatorDescriptor;
 +import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
 +import org.apache.asterix.external.operators.ExternalLookupOperatorDescriptor;
 +import org.apache.asterix.external.operators.ExternalRTreeSearchOperatorDescriptor;
 +import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
 +import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
 +import org.apache.asterix.external.provider.AdapterFactoryProvider;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.FeedConstants;
 +import org.apache.asterix.formats.base.IDataFormat;
 +import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
 +import org.apache.asterix.formats.nontagged.AqlLinearizeComparatorFactoryProvider;
 +import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
 +import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 +import org.apache.asterix.metadata.MetadataException;
 +import org.apache.asterix.metadata.MetadataManager;
 +import org.apache.asterix.metadata.MetadataTransactionContext;
 +import org.apache.asterix.metadata.dataset.hints.DatasetHints.DatasetCardinalityHint;
 +import org.apache.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
 +import org.apache.asterix.metadata.entities.Dataset;
 +import org.apache.asterix.metadata.entities.DatasourceAdapter;
 +import org.apache.asterix.metadata.entities.Datatype;
 +import org.apache.asterix.metadata.entities.Dataverse;
 +import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
 +import org.apache.asterix.metadata.entities.Feed;
 +import org.apache.asterix.metadata.entities.FeedPolicyEntity;
 +import org.apache.asterix.metadata.entities.Index;
 +import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 +import org.apache.asterix.metadata.entities.NodeGroup;
 +import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
 +import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
 +import org.apache.asterix.metadata.utils.DatasetUtils;
 +import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
 +import org.apache.asterix.metadata.utils.SplitsAndConstraintsUtil;
 +import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.asterix.om.types.ATypeTag;
 +import org.apache.asterix.om.types.IAType;
 +import org.apache.asterix.om.util.AsterixAppContextInfo;
 +import org.apache.asterix.om.util.AsterixClusterProperties;
 +import org.apache.asterix.om.util.NonTaggedFormatUtil;
 +import org.apache.asterix.runtime.base.AsterixTupleFilterFactory;
 +import org.apache.asterix.runtime.formats.FormatUtils;
 +import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
 +import org.apache.asterix.runtime.job.listener.JobEventListenerFactory;
 +import org.apache.asterix.runtime.operators.AsterixLSMInvertedIndexUpsertOperatorDescriptor;
 +import org.apache.asterix.runtime.operators.AsterixLSMTreeUpsertOperatorDescriptor;
 +import org.apache.asterix.transaction.management.opcallbacks.LockThenSearchOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexInstantSearchOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexModificationOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexOperationTrackerProvider;
 +import org.apache.asterix.transaction.management.opcallbacks.PrimaryIndexSearchOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.SecondaryIndexModificationOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
 +import org.apache.asterix.transaction.management.opcallbacks.SecondaryIndexSearchOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.TempDatasetPrimaryIndexModificationOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.TempDatasetSecondaryIndexModificationOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.opcallbacks.UpsertOperationCallbackFactory;
 +import org.apache.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 +import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +import org.apache.hyracks.algebricks.common.utils.Triple;
 +import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 +import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
 +import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 +import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
 +import org.apache.hyracks.algebricks.core.algebra.metadata.IDataSink;
 +import org.apache.hyracks.algebricks.core.algebra.metadata.IDataSource;
 +import org.apache.hyracks.algebricks.core.algebra.metadata.IDataSourceIndex;
 +import org.apache.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
 +import org.apache.hyracks.algebricks.core.algebra.properties.DefaultNodeGroupDomain;
 +import org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain;
 +import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenContext;
 +import org.apache.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
 +import org.apache.hyracks.algebricks.core.jobgen.impl.OperatorSchemaImpl;
 +import org.apache.hyracks.algebricks.data.IAWriterFactory;
 +import org.apache.hyracks.algebricks.data.IPrinterFactory;
 +import org.apache.hyracks.algebricks.data.IResultSerializerFactoryProvider;
 +import org.apache.hyracks.algebricks.data.ISerializerDeserializerProvider;
 +import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
 +import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
 +import org.apache.hyracks.algebricks.runtime.operators.std.SinkWriterRuntimeFactory;
 +import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
 +import org.apache.hyracks.api.dataflow.value.IResultSerializerFactory;
 +import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
 +import org.apache.hyracks.api.dataflow.value.ITypeTraits;
 +import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 +import org.apache.hyracks.api.dataset.ResultSetId;
 +import org.apache.hyracks.api.job.JobSpecification;
 +import org.apache.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 +import org.apache.hyracks.data.std.primitive.ShortPointable;
 +import org.apache.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 +import org.apache.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 +import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
 +import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
 +import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
 +import org.apache.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
 +import org.apache.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexBulkLoadOperatorDescriptor;
 +import org.apache.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
 +import org.apache.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeDataflowHelperFactory;
 +import org.apache.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
 +import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
 +
 +public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, String> {
 +
 +    private static Logger LOGGER = Logger.getLogger(AqlMetadataProvider.class.getName());
 +    private MetadataTransactionContext mdTxnCtx;
 +    private boolean isWriteTransaction;
 +    private final Map<String, String[]> stores;
 +    private Map<String, String> config;
 +    private IAWriterFactory writerFactory;
 +    private FileSplit outputFile;
 +    private boolean asyncResults;
 +    private ResultSetId resultSetId;
 +    private IResultSerializerFactoryProvider resultSerializerFactoryProvider;
 +    private final ICentralFeedManager centralFeedManager;
 +
 +    private final Dataverse defaultDataverse;
 +    private JobId jobId;
 +    private Map<String, Integer> locks;
 +    private boolean isTemporaryDatasetWriteJob = true;
 +
 +    private final AsterixStorageProperties storageProperties;
 +
 +    public String getPropertyValue(String propertyName) {
 +        return config.get(propertyName);
 +    }
 +
 +    public void setConfig(Map<String, String> config) {
 +        this.config = config;
 +    }
 +
 +    public Map<String, String[]> getAllStores() {
 +        return stores;
 +    }
 +
 +    public Map<String, String> getConfig() {
 +        return config;
 +    }
 +
 +    public AqlMetadataProvider(Dataverse defaultDataverse, ICentralFeedManager centralFeedManager) {
 +        this.defaultDataverse = defaultDataverse;
 +        this.stores = AsterixAppContextInfo.getInstance().getMetadataProperties().getStores();
 +        this.storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
 +        this.centralFeedManager = centralFeedManager;
 +    }
 +
 +    public void setJobId(JobId jobId) {
 +        this.jobId = jobId;
 +    }
 +
 +    public Dataverse getDefaultDataverse() {
 +        return defaultDataverse;
 +    }
 +
 +    public String getDefaultDataverseName() {
 +        return defaultDataverse == null ? null : defaultDataverse.getDataverseName();
 +    }
 +
 +    public void setWriteTransaction(boolean writeTransaction) {
 +        this.isWriteTransaction = writeTransaction;
 +    }
 +
 +    public void setWriterFactory(IAWriterFactory writerFactory) {
 +        this.writerFactory = writerFactory;
 +    }
 +
 +    public void setMetadataTxnContext(MetadataTransactionContext mdTxnCtx) {
 +        this.mdTxnCtx = mdTxnCtx;
 +    }
 +
 +    public MetadataTransactionContext getMetadataTxnContext() {
 +        return mdTxnCtx;
 +    }
 +
 +    public IAWriterFactory getWriterFactory() {
 +        return this.writerFactory;
 +    }
 +
 +    public FileSplit getOutputFile() {
 +        return outputFile;
 +    }
 +
 +    public void setOutputFile(FileSplit outputFile) {
 +        this.outputFile = outputFile;
 +    }
 +
 +    public boolean getResultAsyncMode() {
 +        return asyncResults;
 +    }
 +
 +    public void setResultAsyncMode(boolean asyncResults) {
 +        this.asyncResults = asyncResults;
 +    }
 +
 +    public ResultSetId getResultSetId() {
 +        return resultSetId;
 +    }
 +
 +    public void setResultSetId(ResultSetId resultSetId) {
 +        this.resultSetId = resultSetId;
 +    }
 +
 +    public void setResultSerializerFactoryProvider(IResultSerializerFactoryProvider rafp) {
 +        this.resultSerializerFactoryProvider = rafp;
 +    }
 +
 +    public IResultSerializerFactoryProvider getResultSerializerFactoryProvider() {
 +        return resultSerializerFactoryProvider;
 +    }
 +
 +    /**
 +     * Retrieve the Output RecordType, as defined by "set output-record-type".
 +     */
 +    public ARecordType findOutputRecordType() throws AlgebricksException {
 +        String outputRecordType = getPropertyValue("output-record-type");
 +        if (outputRecordType == null) {
 +            return null;
 +        }
 +        String dataverse = getDefaultDataverseName();
 +        if (dataverse == null) {
 +            throw new AlgebricksException("Cannot declare output-record-type with no dataverse!");
 +        }
 +        IAType type = findType(dataverse, outputRecordType);
 +        if (!(type instanceof ARecordType)) {
 +            throw new AlgebricksException("Type " + outputRecordType + " is not a record type!");
 +        }
 +        return (ARecordType) type;
 +    }
 +
 +    @Override
 +    public AqlDataSource findDataSource(AqlSourceId id) throws AlgebricksException {
 +        AqlSourceId aqlId = id;
 +        try {
 +            return lookupSourceInMetadata(aqlId);
 +        } catch (MetadataException e) {
 +            throw new AlgebricksException(e);
 +        }
 +    }
 +
 +    public boolean isWriteTransaction() {
 +        // The transaction writes persistent datasets.
 +        return isWriteTransaction;
 +    }
 +
 +    public boolean isTemporaryDatasetWriteJob() {
 +        // The transaction only writes temporary datasets.
 +        return isTemporaryDatasetWriteJob;
 +    }
 +
 +    @Override
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getScannerRuntime(
 +            IDataSource<AqlSourceId> dataSource, List<LogicalVariable> scanVariables,
 +            List<LogicalVariable> projectVariables, boolean projectPushed, List<LogicalVariable> minFilterVars,
 +            List<LogicalVariable> maxFilterVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
 +            JobGenContext context, JobSpecification jobSpec, Object implConfig) throws AlgebricksException {
 +        try {
 +            switch (((AqlDataSource) dataSource).getDatasourceType()) {
 +                case FEED:
 +                    return buildFeedCollectRuntime(jobSpec, (FeedDataSource) dataSource);
 +                case INTERNAL_DATASET: {
 +                    // querying an internal dataset
 +                    return buildInternalDatasetScan(jobSpec, scanVariables, minFilterVars, maxFilterVars, opSchema,
 +                            typeEnv, dataSource, context, implConfig);
 +                }
 +                case EXTERNAL_DATASET: {
 +                    // querying an external dataset
 +                    Dataset dataset = ((DatasetDataSource) dataSource).getDataset();
 +                    String itemTypeName = dataset.getItemTypeName();
 +                    IAType itemType = MetadataManager.INSTANCE
 +                            .getDatatype(mdTxnCtx, dataset.getItemTypeDataverseName(), itemTypeName).getDatatype();
 +
 +                    ExternalDatasetDetails edd = (ExternalDatasetDetails) dataset.getDatasetDetails();
 +                    IAdapterFactory adapterFactory = getConfiguredAdapterFactory(dataset, edd.getAdapter(),
 +                            edd.getProperties(), (ARecordType) itemType, false, null, null);
 +                    return buildExternalDatasetDataScannerRuntime(jobSpec, itemType, adapterFactory,
 +                            NonTaggedDataFormat.INSTANCE);
 +                }
 +                case LOADABLE: {
 +                    // This is a load into dataset operation
 +                    LoadableDataSource alds = (LoadableDataSource) dataSource;
 +                    List<List<String>> partitioningKeys = alds.getPartitioningKeys();
 +                    boolean isPKAutoGenerated = ((InternalDatasetDetails) alds.getTargetDataset().getDatasetDetails())
 +                            .isAutogenerated();
 +                    ARecordType itemType = (ARecordType) alds.getLoadedType();
 +                    int pkIndex = 0;
 +                    IAdapterFactory adapterFactory = getConfiguredAdapterFactory(alds.getTargetDataset(),
 +                            alds.getAdapter(), alds.getAdapterProperties(), itemType, isPKAutoGenerated,
 +                            partitioningKeys, null);
 +                    RecordDescriptor rDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
 +                    return buildLoadableDatasetScan(jobSpec, alds, adapterFactory, rDesc, isPKAutoGenerated,
 +                            partitioningKeys, itemType, pkIndex);
 +                }
 +                default: {
 +                    throw new IllegalArgumentException();
 +                }
 +
 +            }
 +        } catch (AsterixException e) {
 +            throw new AlgebricksException(e);
 +        }
 +    }
 +
 +    @SuppressWarnings("rawtypes")
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildFeedCollectRuntime(JobSpecification jobSpec,
 +            FeedDataSource feedDataSource) throws AlgebricksException {
 +
 +        try {
 +            ARecordType feedOutputType = (ARecordType) feedDataSource.getItemType();
 +            ISerializerDeserializer payloadSerde = NonTaggedDataFormat.INSTANCE.getSerdeProvider()
 +                    .getSerializerDeserializer(feedOutputType);
 +            IAType metaType = feedDataSource.getMetaItemType();
 +            List<IAType> pkTypes = feedDataSource.getPkTypes();
 +            ArrayList<ISerializerDeserializer> serdes = new ArrayList<>();
 +            serdes.add(payloadSerde);
 +            if (metaType != null) {
 +                serdes.add(AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType));
 +            }
 +            if (pkTypes != null) {
 +                for (IAType type : pkTypes) {
 +                    serdes.add(AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(type));
 +                }
 +            }
 +            RecordDescriptor feedDesc = new RecordDescriptor(
 +                    serdes.toArray(new ISerializerDeserializer[serdes.size()]));
 +            FeedPolicyEntity feedPolicy = (FeedPolicyEntity) feedDataSource.getProperties()
 +                    .get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
 +            if (feedPolicy == null) {
 +                throw new AlgebricksException("Feed not configured with a policy");
 +            }
 +            feedPolicy.getProperties().put(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY, feedPolicy.getPolicyName());
 +            FeedConnectionId feedConnectionId = new FeedConnectionId(feedDataSource.getId().getDataverseName(),
 +                    feedDataSource.getId().getDatasourceName(), feedDataSource.getTargetDataset());
 +            FeedCollectOperatorDescriptor feedCollector = new FeedCollectOperatorDescriptor(jobSpec, feedConnectionId,
 +                    feedDataSource.getSourceFeedId(), feedOutputType, feedDesc, feedPolicy.getProperties(),
 +                    feedDataSource.getLocation());
 +
 +            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedCollector,
 +                    determineLocationConstraint(feedDataSource));
 +
 +        } catch (Exception e) {
 +            throw new AlgebricksException(e);
 +        }
 +    }
 +
 +    private AlgebricksAbsolutePartitionConstraint determineLocationConstraint(FeedDataSource feedDataSource)
 +            throws AsterixException {
 +        String[] locationArray = null;
 +        String locations = null;;
 +        switch (feedDataSource.getSourceFeedType()) {
 +            case PRIMARY:
 +                switch (feedDataSource.getLocation()) {
 +                    case COMPUTE:
 +                        if (feedDataSource.getFeed().getFeedId().equals(feedDataSource.getSourceFeedId())) {
 +                            locationArray = feedDataSource.getLocations();
 +                        } else {
 +                            Collection<FeedActivity> activities = centralFeedManager.getFeedLoadManager()
 +                                    .getFeedActivities();
 +                            Iterator<FeedActivity> it = activities.iterator();
 +                            FeedActivity activity = null;
 +                            while (it.hasNext()) {
 +                                activity = it.next();
 +                                if (activity.getDataverseName().equals(feedDataSource.getSourceFeedId().getDataverse())
 +                                        && activity.getFeedName()
 +                                                .equals(feedDataSource.getSourceFeedId().getFeedName())) {
 +                                    locations = activity.getFeedActivityDetails()
 +                                            .get(FeedActivityDetails.COMPUTE_LOCATIONS);
 +                                    locationArray = locations.split(",");
 +                                    break;
 +                                }
 +                            }
 +                        }
 +                        break;
 +                    case INTAKE:
 +                        locationArray = feedDataSource.getLocations();
 +                        break;
 +                    default:
 +                        throw new AsterixException(
 +                                "Can't subscibe to a FeedRuntime with type: " + feedDataSource.getLocation());
 +                }
 +                break;
 +            case SECONDARY:
 +                Collection<FeedActivity> activities = centralFeedManager.getFeedLoadManager().getFeedActivities();
 +                Iterator<FeedActivity> it = activities.iterator();
 +                FeedActivity activity = null;
 +                while (it.hasNext()) {
 +                    activity = it.next();
 +                    if (activity.getDataverseName().equals(feedDataSource.getSourceFeedId().getDataverse())
 +                            && activity.getFeedName().equals(feedDataSource.getSourceFeedId().getFeedName())) {
 +                        switch (feedDataSource.getLocation()) {
 +                            case INTAKE:
 +                                locations = activity.getFeedActivityDetails()
 +                                        .get(FeedActivityDetails.COLLECT_LOCATIONS);
 +                                break;
 +                            case COMPUTE:
 +                                locations = activity.getFeedActivityDetails()
 +                                        .get(FeedActivityDetails.COMPUTE_LOCATIONS);
 +                                break;
 +                            default:
 +                                throw new AsterixException(
 +                                        "Can't subscibe to a FeedRuntime with type: " + feedDataSource.getLocation());
 +                        }
 +                        break;
 +                    }
 +                }
 +
 +                if (locations != null) {
 +                    locationArray = locations.split(",");
 +                } else {
 +                    String message = "Unable to discover location(s) for source feed data hand-off "
 +                            + feedDataSource.getSourceFeedId();
 +                    if (LOGGER.isLoggable(Level.SEVERE)) {
 +                        LOGGER.severe(message);
 +                    }
 +                    throw new AsterixException(message);
 +                }
 +                break;
 +        }
 +        AlgebricksAbsolutePartitionConstraint locationConstraint = new AlgebricksAbsolutePartitionConstraint(
 +                locationArray);
 +        return locationConstraint;
 +    }
 +
 +    private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildLoadableDatasetScan(JobSpecification jobSpec,
 +            LoadableDataSource alds, IAdapterFactory adapterFactory, RecordDescriptor rDesc, boolean isPKAutoGenerated,
 +            List<List<String>> primaryKeys, ARecordType recType, int pkIndex) throws AlgebricksException {
 +        ExternalDataScanOperatorDescriptor dataScanner = new ExternalDataScanOperatorDescriptor(jobSpec, rDesc,
 +                adapterFactory);
 +        AlgebricksPartitionConstraint constraint;
 +        try {
 +            constraint = adapterFactory.getPartitionConstraint();
 +        } catch (Exception e) {
 +            throw new AlgebricksException(e);
 +        }
 +        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(dataScanner, constraint);
 +    }
 +
 +    /**
 +     * Resolves the IDataFormat configured for a dataverse by looking up the
 +     * dataverse's stored format class name and instantiating it reflectively.
 +     *
 +     * @param dataverseName name of the dataverse whose format is requested
 +     * @return a freshly constructed IDataFormat instance
 +     * @throws AsterixException if the metadata lookup or reflective instantiation fails
 +     */
 +    public IDataFormat getDataFormat(String dataverseName) throws AsterixException {
 +        Dataverse dataverse = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverseName);
 +        IDataFormat format;
 +        try {
 +            // Requires the format class to expose a public no-arg constructor.
 +            format = (IDataFormat) Class.forName(dataverse.getDataFormat()).newInstance();
 +        } catch (Exception e) {
 +            throw new AsterixException(e);
 +        }
 +        return format;
 +    }
 +
 +    /**
 +     * Builds a full scan over an internal dataset's primary index. Translates any
 +     * min/max filter variables into operator-schema column indexes and delegates
 +     * to {@link #buildBtreeRuntime} with null low/high keys (i.e. an unbounded
 +     * range) and inclusive bounds on both ends.
 +     *
 +     * @throws AlgebricksException on job-generation failures
 +     * @throws MetadataException on metadata lookup failures
 +     */
 +    private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildInternalDatasetScan(JobSpecification jobSpec,
 +            List<LogicalVariable> outputVars, List<LogicalVariable> minFilterVars, List<LogicalVariable> maxFilterVars,
 +            IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv, IDataSource<AqlSourceId> dataSource,
 +            JobGenContext context, Object implConfig) throws AlgebricksException, MetadataException {
 +        AqlSourceId asid = dataSource.getId();
 +        String dataverseName = asid.getDataverseName();
 +        String datasetName = asid.getDatasourceName();
 +        // The primary index is registered under the dataset's own name.
 +        Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataverseName, datasetName, datasetName);
 +
 +        // Map each min-filter variable to its column position in the operator schema.
 +        int[] minFilterFieldIndexes = null;
 +        if (minFilterVars != null && !minFilterVars.isEmpty()) {
 +            minFilterFieldIndexes = new int[minFilterVars.size()];
 +            int i = 0;
 +            for (LogicalVariable v : minFilterVars) {
 +                minFilterFieldIndexes[i] = opSchema.findVariable(v);
 +                i++;
 +            }
 +        }
 +        // Same mapping for the max-filter variables.
 +        int[] maxFilterFieldIndexes = null;
 +        if (maxFilterVars != null && !maxFilterVars.isEmpty()) {
 +            maxFilterFieldIndexes = new int[maxFilterVars.size()];
 +            int i = 0;
 +            for (LogicalVariable v : maxFilterVars) {
 +                maxFilterFieldIndexes[i] = opSchema.findVariable(v);
 +                i++;
 +            }
 +        }
 +
 +        // null low/high key fields with inclusive bounds => unbounded primary-index scan.
 +        return buildBtreeRuntime(jobSpec, outputVars, opSchema, typeEnv, context, true, false,
 +                ((DatasetDataSource) dataSource).getDataset(), primaryIndex.getIndexName(), null, null, true, true,
 +                implConfig, minFilterFieldIndexes, maxFilterFieldIndexes);
 +    }
 +
 +    /**
 +     * Creates and configures an adapter factory for a dataset. Injects the
 +     * dataset's dataverse into the configuration, obtains the factory from the
 +     * AdapterFactoryProvider, and — when the dataset has a usable external files
 +     * index — collects the dataset's external files, dropping any file with a
 +     * pending metadata operation.
 +     *
 +     * NOTE(review): isPKAutoGenerated and primaryKeys are not referenced by this
 +     * implementation.
 +     *
 +     * @throws AlgebricksException wrapping any failure during adapter creation
 +     */
 +    private IAdapterFactory getConfiguredAdapterFactory(Dataset dataset, String adapterName,
 +            Map<String, String> configuration, ARecordType itemType, boolean isPKAutoGenerated,
 +            List<List<String>> primaryKeys, ARecordType metaType) throws AlgebricksException {
 +        try {
 +            configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName());
 +            IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(adapterName, configuration,
 +                    itemType, metaType);
 +
 +            // check to see if dataset is indexed
 +            Index filesIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                    dataset.getDatasetName(),
 +                    dataset.getDatasetName().concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX));
 +
 +            // pendingOp == 0 presumably means the files index has no in-flight metadata op — confirm.
 +            if (filesIndex != null && filesIndex.getPendingOp() == 0) {
 +                // get files
 +                List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
 +                Iterator<ExternalFile> iterator = files.iterator();
 +                // Keep only files with no pending operation; use Iterator.remove to
 +                // avoid ConcurrentModificationException while filtering in place.
 +                while (iterator.hasNext()) {
 +                    if (iterator.next().getPendingOp() != ExternalFilePendingOp.PENDING_NO_OP) {
 +                        iterator.remove();
 +                    }
 +                }
 +                // TODO Check this call, result of merge from master!
 +                // ((IGenericAdapterFactory) adapterFactory).setFiles(files);
 +            }
 +
 +            return adapterFactory;
 +        } catch (Exception e) {
 +            throw new AlgebricksException("Unable to create adapter", e);
 +        }
 +    }
 +
 +    /**
 +     * Builds the data-scanner runtime for an external dataset. Only record-typed
 +     * datasets are supported; the record serde is obtained from the dataverse's
 +     * data format and the partition constraint from the adapter factory.
 +     *
 +     * @param itemType the dataset's item type; must have type tag RECORD
 +     * @return the scan operator paired with the adapter's partition constraint
 +     * @throws AlgebricksException if the item type is not a record or the adapter
 +     *             cannot compute its partition constraint
 +     */
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildExternalDatasetDataScannerRuntime(
 +            JobSpecification jobSpec, IAType itemType, IAdapterFactory adapterFactory, IDataFormat format)
 +            throws AlgebricksException {
 +        if (itemType.getTypeTag() != ATypeTag.RECORD) {
 +            throw new AlgebricksException("Can only scan datasets of records.");
 +        }
 +
 +        // Scanner emits a single field: the serialized record payload.
 +        @SuppressWarnings("rawtypes")
 +        ISerializerDeserializer payloadSerde = format.getSerdeProvider().getSerializerDeserializer(itemType);
 +        RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
 +
 +        ExternalDataScanOperatorDescriptor dataScanner = new ExternalDataScanOperatorDescriptor(jobSpec, scannerDesc,
 +                adapterFactory);
 +
 +        AlgebricksPartitionConstraint constraint;
 +        try {
 +            constraint = adapterFactory.getPartitionConstraint();
 +        } catch (Exception e) {
 +            throw new AlgebricksException(e);
 +        }
 +
 +        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(dataScanner, constraint);
 +    }
 +
 +    /**
 +     * Builds the intake runtime for a primary feed. Resolves the feed's adapter
 +     * factory, output record descriptor and adapter type, then constructs a
 +     * FeedIntakeOperatorDescriptor: directly for INTERNAL adapters, or by library
 +     * name + factory class name for EXTERNAL adapters (the library name is the
 +     * prefix of the adapter name before the library-name separator).
 +     *
 +     * @return triple of (intake operator, adapter partition constraint, adapter factory)
 +     * @throws Exception propagated from factory resolution or constraint computation
 +     */
 +    public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
 +            JobSpecification jobSpec, Feed primaryFeed, FeedPolicyAccessor policyAccessor) throws Exception {
 +        Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput = null;
 +        factoryOutput = FeedMetadataUtil.getPrimaryFeedFactoryAndOutput(primaryFeed, policyAccessor, mdTxnCtx);
 +        ARecordType recordType = FeedMetadataUtil.getOutputType(primaryFeed, primaryFeed.getAdapterConfiguration(),
 +                ExternalDataConstants.KEY_TYPE_NAME);
 +        IAdapterFactory adapterFactory = factoryOutput.first;
 +        FeedIntakeOperatorDescriptor feedIngestor = null;
 +        switch (factoryOutput.third) {
 +            case INTERNAL:
 +                feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, primaryFeed, adapterFactory, recordType,
 +                        policyAccessor, factoryOutput.second);
 +                break;
 +            case EXTERNAL:
 +                // External adapters are addressed by library: "<library><sep><adapter>".
 +                String libraryName = primaryFeed.getAdapterName().trim()
 +                        .split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
 +                feedIngestor = new FeedIntakeOperatorDescriptor(jobSpec, primaryFeed, libraryName,
 +                        adapterFactory.getClass().getName(), recordType, policyAccessor, factoryOutput.second);
 +                break;
 +        }
 +
 +        AlgebricksPartitionConstraint partitionConstraint = adapterFactory.getPartitionConstraint();
 +        return new Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory>(feedIngestor,
 +                partitionConstraint, adapterFactory);
 +    }
 +
 +    /**
 +     * Builds a B-tree search runtime over a dataset index (primary or secondary,
 +     * internal or external). Resolves index metadata; computes type traits,
 +     * comparator factories, bloom-filter key fields and (when the dataset has a
 +     * filter) filter/btree field mappings; selects the search-operation callback;
 +     * then instantiates either an LSM B-tree search operator (internal datasets)
 +     * or an external B-tree-with-buddy search operator.
 +     *
 +     * Null lowKeyFields/highKeyFields appear to denote an unbounded range (see
 +     * buildInternalDatasetScan) — confirm with other callers.
 +     *
 +     * @throws AlgebricksException wrapping any MetadataException from lookups
 +     */
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildBtreeRuntime(JobSpecification jobSpec,
 +            List<LogicalVariable> outputVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
 +            JobGenContext context, boolean retainInput, boolean retainNull, Dataset dataset, String indexName,
 +            int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
 +            Object implConfig, int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes) throws AlgebricksException {
 +
 +        boolean isSecondary = true;
 +        int numSecondaryKeys = 0;
 +        try {
 +            boolean temp = dataset.getDatasetDetails().isTemp();
 +            // The primary index shares the dataset's name; any other name is a secondary index.
 +            Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                    dataset.getDatasetName(), dataset.getDatasetName());
 +            if (primaryIndex != null && (dataset.getDatasetType() != DatasetType.EXTERNAL)) {
 +                isSecondary = !indexName.equals(primaryIndex.getIndexName());
 +            }
 +            int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
 +            RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
 +            int[] bloomFilterKeyFields;
 +            ITypeTraits[] typeTraits;
 +            IBinaryComparatorFactory[] comparatorFactories;
 +
 +            ARecordType itemType = (ARecordType) this.findType(dataset.getItemTypeDataverseName(),
 +                    dataset.getItemTypeName());
 +            ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
 +            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
 +                    itemType, context.getBinaryComparatorFactoryProvider());
 +            int[] filterFields = null;
 +            int[] btreeFields = null;
 +
 +            if (isSecondary) {
 +                // Secondary index: bloom filter over the secondary key columns;
 +                // traits/comparators cover secondary keys followed by primary keys.
 +                Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                        dataset.getDatasetName(), indexName);
 +                numSecondaryKeys = secondaryIndex.getKeyFieldNames().size();
 +                bloomFilterKeyFields = new int[numSecondaryKeys];
 +                for (int i = 0; i < numSecondaryKeys; i++) {
 +                    bloomFilterKeyFields[i] = i;
 +                }
 +                Pair<IBinaryComparatorFactory[], ITypeTraits[]> comparatorFactoriesAndTypeTraits = getComparatorFactoriesAndTypeTraitsOfSecondaryBTreeIndex(
 +                        secondaryIndex.getIndexType(), secondaryIndex.getKeyFieldNames(),
 +                        secondaryIndex.getKeyFieldTypes(), DatasetUtils.getPartitioningKeys(dataset), itemType,
 +                        dataset.getDatasetType());
 +                comparatorFactories = comparatorFactoriesAndTypeTraits.first;
 +                typeTraits = comparatorFactoriesAndTypeTraits.second;
 +                if (filterTypeTraits != null) {
 +                    // Filter field sits right after all key columns.
 +                    filterFields = new int[1];
 +                    filterFields[0] = numSecondaryKeys + numPrimaryKeys;
 +                    btreeFields = new int[numSecondaryKeys + numPrimaryKeys];
 +                    for (int k = 0; k < btreeFields.length; k++) {
 +                        btreeFields[k] = k;
 +                    }
 +                }
 +
 +            } else {
 +                // Primary index: bloom filter over the primary key columns.
 +                bloomFilterKeyFields = new int[numPrimaryKeys];
 +                for (int i = 0; i < numPrimaryKeys; i++) {
 +                    bloomFilterKeyFields[i] = i;
 +                }
- 
-                 typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType);
++                // get meta item type
++                ARecordType metaItemType = DatasetUtils.getMetaType(this, dataset);
++                typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType, metaItemType);
 +                comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset, itemType,
 +                        context.getBinaryComparatorFactoryProvider());
- 
 +                filterFields = DatasetUtils.createFilterFields(dataset);
 +                btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);
 +            }
 +
 +            IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
 +            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc;
 +            try {
 +                spPc = splitProviderAndPartitionConstraintsForDataset(dataset.getDataverseName(),
 +                        dataset.getDatasetName(), indexName, temp);
 +            } catch (Exception e) {
 +                throw new AlgebricksException(e);
 +            }
 +
 +            // Temp datasets use no-op callbacks; otherwise pick the callback matching
 +            // primary vs. secondary and instant-lock vs. regular search.
 +            ISearchOperationCallbackFactory searchCallbackFactory = null;
 +            if (isSecondary) {
 +                searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
 +                        : new SecondaryIndexSearchOperationCallbackFactory();
 +            } else {
 +                JobId jobId = ((JobEventListenerFactory) jobSpec.getJobletEventListenerFactory()).getJobId();
 +                int datasetId = dataset.getDatasetId();
 +                int[] primaryKeyFields = new int[numPrimaryKeys];
 +                for (int i = 0; i < numPrimaryKeys; i++) {
 +                    primaryKeyFields[i] = i;
 +                }
 +
 +                AqlMetadataImplConfig aqlMetadataImplConfig = (AqlMetadataImplConfig) implConfig;
 +                ITransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
 +                if (aqlMetadataImplConfig != null && aqlMetadataImplConfig.isInstantLock()) {
 +                    searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
 +                            : new PrimaryIndexInstantSearchOperationCallbackFactory(jobId, datasetId, primaryKeyFields,
 +                                    txnSubsystemProvider, ResourceType.LSM_BTREE);
 +                } else {
 +                    searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
 +                            : new PrimaryIndexSearchOperationCallbackFactory(jobId, datasetId, primaryKeyFields,
 +                                    txnSubsystemProvider, ResourceType.LSM_BTREE);
 +                }
 +            }
 +            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
 +                    .getMergePolicyFactory(dataset, mdTxnCtx);
 +            AsterixRuntimeComponentsProvider rtcProvider = AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER;
 +            BTreeSearchOperatorDescriptor btreeSearchOp;
 +            if (dataset.getDatasetType() == DatasetType.INTERNAL) {
 +                btreeSearchOp = new BTreeSearchOperatorDescriptor(jobSpec, outputRecDesc,
 +                        appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
 +                        spPc.first, typeTraits, comparatorFactories, bloomFilterKeyFields, lowKeyFields, highKeyFields,
 +                        lowKeyInclusive, highKeyInclusive,
 +                        new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
 +                                compactionInfo.first, compactionInfo.second,
 +                                isSecondary ? new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId())
 +                                        : new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
 +                                rtcProvider, LSMBTreeIOOperationCallbackFactory.INSTANCE,
 +                                storageProperties.getBloomFilterFalsePositiveRate(), !isSecondary, filterTypeTraits,
 +                                filterCmpFactories, btreeFields, filterFields, !temp),
 +                        retainInput, retainNull, context.getNullWriterFactory(), searchCallbackFactory,
 +                        minFilterFieldIndexes, maxFilterFieldIndexes);
 +            } else {
 +                // External dataset <- use the btree with buddy btree->
 +                // Be Careful of Key Start Index ?
 +                int[] buddyBreeFields = new int[] { numSecondaryKeys };
 +                ExternalBTreeWithBuddyDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeWithBuddyDataflowHelperFactory(
 +                        compactionInfo.first, compactionInfo.second,
 +                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
 +                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
 +                        LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
 +                        getStorageProperties().getBloomFilterFalsePositiveRate(), buddyBreeFields,
 +                        ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, this), !temp);
 +                btreeSearchOp = new ExternalBTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, rtcProvider,
 +                        rtcProvider, spPc.first, typeTraits, comparatorFactories, bloomFilterKeyFields, lowKeyFields,
 +                        highKeyFields, lowKeyInclusive, highKeyInclusive, indexDataflowHelperFactory, retainInput,
 +                        retainNull, context.getNullWriterFactory(), searchCallbackFactory);
 +            }
 +
 +            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeSearchOp, spPc.second);
 +
 +        } catch (MetadataException me) {
 +            throw new AlgebricksException(me);
 +        }
 +    }
 +
 +    /**
 +     * Computes the comparator factories and type traits for a secondary B-tree
 +     * index tuple: first the secondary key fields (in declared order), then the
 +     * primary key fields appended after them. Primary key types come from the
 +     * record type for internal datasets and from IndexingConstants for external
 +     * datasets.
 +     *
 +     * @return pair of (comparator factories, type traits), each of length
 +     *         sidx-key-count + pidx-key-count
 +     * @throws AlgebricksException on unknown dataset type or type-resolution failure
 +     */
 +    private Pair<IBinaryComparatorFactory[], ITypeTraits[]> getComparatorFactoriesAndTypeTraitsOfSecondaryBTreeIndex(
 +            IndexType indexType, List<List<String>> sidxKeyFieldNames, List<IAType> sidxKeyFieldTypes,
 +            List<List<String>> pidxKeyFieldNames, ARecordType recType, DatasetType dsType) throws AlgebricksException {
 +
 +        IBinaryComparatorFactory[] comparatorFactories;
 +        ITypeTraits[] typeTraits;
 +        int sidxKeyFieldCount = sidxKeyFieldNames.size();
 +        int pidxKeyFieldCount = pidxKeyFieldNames.size();
 +        typeTraits = new ITypeTraits[sidxKeyFieldCount + pidxKeyFieldCount];
 +        comparatorFactories = new IBinaryComparatorFactory[sidxKeyFieldCount + pidxKeyFieldCount];
 +
 +        // Secondary key columns first; open-field types are made non-nullable.
 +        int i = 0;
 +        for (; i < sidxKeyFieldCount; ++i) {
 +            Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(sidxKeyFieldTypes.get(i),
 +                    sidxKeyFieldNames.get(i), recType);
 +            IAType keyType = keyPairType.first;
 +            comparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType,
 +                    true);
 +            typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
 +        }
 +
 +        // Then the primary key columns, continuing at offset i.
 +        for (int j = 0; j < pidxKeyFieldCount; ++j, ++i) {
 +            IAType keyType = null;
 +            try {
 +                switch (dsType) {
 +                    case INTERNAL:
 +                        keyType = recType.getSubFieldType(pidxKeyFieldNames.get(j));
 +                        break;
 +                    case EXTERNAL:
 +                        keyType = IndexingConstants.getFieldType(j);
 +                        break;
 +                    default:
 +                        throw new AlgebricksException("Unknown Dataset Type");
 +                }
 +            } catch (AsterixException e) {
 +                throw new AlgebricksException(e);
 +            }
 +            comparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType,
 +                    true);
 +            typeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
 +        }
 +
 +        return new Pair<IBinaryComparatorFactory[], ITypeTraits[]>(comparatorFactories, typeTraits);
 +    }
 +
 +    /**
 +     * Builds an R-tree search runtime over a dataset's spatial secondary index.
 +     * Validates that the index exists and has exactly one key field, derives the
 +     * number of nested key fields from the spatial type's dimensionality
 +     * (2 * numDimensions), and instantiates either an LSM R-tree search operator
 +     * (internal datasets) or an external R-tree search operator.
 +     *
 +     * @throws AlgebricksException wrapping MetadataException, or on a missing
 +     *             index / multi-field key / unresolvable key field
 +     */
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildRtreeRuntime(JobSpecification jobSpec,
 +            List<LogicalVariable> outputVars, IOperatorSchema opSchema, IVariableTypeEnvironment typeEnv,
 +            JobGenContext context, boolean retainInput, boolean retainNull, Dataset dataset, String indexName,
 +            int[] keyFields, int[] minFilterFieldIndexes, int[] maxFilterFieldIndexes) throws AlgebricksException {
 +
 +        try {
 +            ARecordType recType = (ARecordType) findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
 +            int numPrimaryKeys = DatasetUtils.getPartitioningKeys(dataset).size();
 +
 +            boolean temp = dataset.getDatasetDetails().isTemp();
 +            Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                    dataset.getDatasetName(), indexName);
 +            if (secondaryIndex == null) {
 +                throw new AlgebricksException(
 +                        "Code generation error: no index " + indexName + " for dataset " + dataset.getDatasetName());
 +            }
 +            List<List<String>> secondaryKeyFields = secondaryIndex.getKeyFieldNames();
 +            List<IAType> secondaryKeyTypes = secondaryIndex.getKeyFieldTypes();
 +            int numSecondaryKeys = secondaryKeyFields.size();
 +            // R-tree indexes are restricted to a single (spatial) key field.
 +            if (numSecondaryKeys != 1) {
 +                throw new AlgebricksException("Cannot use " + numSecondaryKeys
 +                        + " fields as a key for the R-tree index. There can be only one field as a key for the R-tree index.");
 +            }
 +            Pair<IAType, Boolean> keyTypePair = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
 +                    secondaryKeyFields.get(0), recType);
 +            IAType keyType = keyTypePair.first;
 +            if (keyType == null) {
 +                throw new AlgebricksException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
 +            }
 +            // Each dimension contributes a min and a max coordinate field.
 +            int numDimensions = NonTaggedFormatUtil.getNumDimensions(keyType.getTypeTag());
 +            int numNestedSecondaryKeyFields = numDimensions * 2;
 +            IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
 +            for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
 +                valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
 +            }
 +
 +            RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
 +            // IS NOT THE VARIABLE BELOW ALWAYS = 0 ??
 +            int keysStartIndex = outputRecDesc.getFieldCount() - numNestedSecondaryKeyFields - numPrimaryKeys;
 +            if (retainInput) {
 +                keysStartIndex -= numNestedSecondaryKeyFields;
 +            }
 +            IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
 +                    outputVars, keysStartIndex, numNestedSecondaryKeyFields, typeEnv, context);
 +            ITypeTraits[] typeTraits = JobGenHelper.variablesToTypeTraits(outputVars, keysStartIndex,
 +                    numNestedSecondaryKeyFields + numPrimaryKeys, typeEnv, context);
 +            IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
 +            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc = splitProviderAndPartitionConstraintsForDataset(
 +                    dataset.getDataverseName(), dataset.getDatasetName(), indexName, temp);
 +
 +            // Primary key columns follow the nested secondary key columns in the tuple.
 +            IBinaryComparatorFactory[] primaryComparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(
 +                    dataset, recType, context.getBinaryComparatorFactoryProvider());
 +            int[] btreeFields = new int[primaryComparatorFactories.length];
 +            for (int i = 0; i < btreeFields.length; i++) {
 +                btreeFields[i] = i + numNestedSecondaryKeyFields;
 +            }
 +
 +            ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, recType);
 +            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
 +                    recType, context.getBinaryComparatorFactoryProvider());
 +            int[] filterFields = null;
 +            int[] rtreeFields = null;
 +            if (filterTypeTraits != null) {
 +                // Filter field sits right after all key columns.
 +                filterFields = new int[1];
 +                filterFields[0] = numNestedSecondaryKeyFields + numPrimaryKeys;
 +                rtreeFields = new int[numNestedSecondaryKeyFields + numPrimaryKeys];
 +                for (int i = 0; i < rtreeFields.length; i++) {
 +                    rtreeFields[i] = i;
 +                }
 +            }
 +
 +            IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(keyType.getTypeTag());
 +            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
 +                    .getMergePolicyFactory(dataset, mdTxnCtx);
 +            // Temp datasets use no-op callbacks.
 +            ISearchOperationCallbackFactory searchCallbackFactory = temp ? NoOpOperationCallbackFactory.INSTANCE
 +                    : new SecondaryIndexSearchOperationCallbackFactory();
 +
 +            RTreeSearchOperatorDescriptor rtreeSearchOp;
 +            if (dataset.getDatasetType() == DatasetType.INTERNAL) {
 +                rtreeSearchOp = new RTreeSearchOperatorDescriptor(jobSpec, outputRecDesc,
 +                        appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
 +                        spPc.first, typeTraits, comparatorFactories, keyFields,
 +                        new LSMRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
 +                                primaryComparatorFactories,
 +                                new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()), compactionInfo.first,
 +                                compactionInfo.second,
 +                                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
 +                                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
 +                                LSMRTreeIOOperationCallbackFactory.INSTANCE,
 +                                proposeLinearizer(nestedKeyType.getTypeTag(), comparatorFactories.length),
 +                                storageProperties.getBloomFilterFalsePositiveRate(), rtreeFields, btreeFields,
 +                                filterTypeTraits, filterCmpFactories, filterFields, !temp),
 +                        retainInput, retainNull, context.getNullWriterFactory(), searchCallbackFactory,
 +                        minFilterFieldIndexes, maxFilterFieldIndexes);
 +
 +            } else {
 +                // External Dataset
 +                ExternalRTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalRTreeDataflowHelperFactory(
 +                        valueProviderFactories, RTreePolicyType.RTREE,
 +                        IndexingConstants.getBuddyBtreeComparatorFactories(), compactionInfo.first,
 +                        compactionInfo.second, new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
 +                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
 +                        proposeLinearizer(nestedKeyType.getTypeTag(), comparatorFactories.length),
 +                        getStorageProperties().getBloomFilterFalsePositiveRate(),
 +                        new int[] { numNestedSecondaryKeyFields },
 +                        ExternalDatasetsRegistry.INSTANCE.getAndLockDatasetVersion(dataset, this), !temp);
 +                // Create the operator
 +                rtreeSearchOp = new ExternalRTreeSearchOperatorDescriptor(jobSpec, outputRecDesc,
 +                        appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
 +                        spPc.first, typeTraits, comparatorFactories, keyFields, indexDataflowHelperFactory, retainInput,
 +                        retainNull, context.getNullWriterFactory(), searchCallbackFactory);
 +            }
 +
 +            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(rtreeSearchOp, spPc.second);
 +
 +        } catch (MetadataException me) {
 +            throw new AlgebricksException(me);
 +        }
 +    }
 +
 +    /**
 +     * Builds the sink-writer runtime for a file-split data sink. The output file
 +     * and node come from the sink's single file split, so the returned partition
 +     * constraint pins the writer to exactly that node.
 +     */
 +    @Override
 +    public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,
 +            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc) {
 +        FileSplitDataSink fsds = (FileSplitDataSink) sink;
 +        FileSplitSinkId fssi = fsds.getId();
 +        FileSplit fs = fssi.getFileSplit();
 +        File outFile = fs.getLocalFile().getFile();
 +        String nodeId = fs.getNodeName();
 +
 +        SinkWriterRuntimeFactory runtime = new SinkWriterRuntimeFactory(printColumns, printerFactories, outFile,
 +                getWriterFactory(), inputDesc);
 +        // Single-node constraint: the writer must run where the target file lives.
 +        AlgebricksPartitionConstraint apc = new AlgebricksAbsolutePartitionConstraint(new String[] { nodeId });
 +        return new Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint>(runtime, apc);
 +    }
 +
 +    /**
 +     * Builds the result-writer runtime for a result-set data sink. Serialization
 +     * is delegated to the configured result serializer factory provider; the
 +     * returned constraint is null, i.e. no explicit placement is imposed here.
 +     *
 +     * @throws AlgebricksException wrapping any IOException from writer construction
 +     */
 +    @Override
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getResultHandleRuntime(IDataSink sink,
 +            int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc, boolean ordered,
 +            JobSpecification spec) throws AlgebricksException {
 +        ResultSetDataSink rsds = (ResultSetDataSink) sink;
 +        ResultSetSinkId rssId = rsds.getId();
 +        ResultSetId rsId = rssId.getResultSetId();
 +
 +        ResultWriterOperatorDescriptor resultWriter = null;
 +        try {
 +            IResultSerializerFactory resultSerializedAppenderFactory = resultSerializerFactoryProvider
 +                    .getAqlResultSerializerFactoryProvider(printColumns, printerFactories, getWriterFactory());
 +            resultWriter = new ResultWriterOperatorDescriptor(spec, rsId, ordered, getResultAsyncMode(),
 +                    resultSerializedAppenderFactory);
 +        } catch (IOException e) {
 +            throw new AlgebricksException(e);
 +        }
 +
 +        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(resultWriter, null);
 +    }
 +
 +    @Override
 +    public IDataSourceIndex<String, AqlSourceId> findDataSourceIndex(String indexId, AqlSourceId dataSourceId)
 +            throws AlgebricksException {
 +        AqlDataSource ads = findDataSource(dataSourceId);
 +        Dataset dataset = ((DatasetDataSource) ads).getDataset();
 +
 +        try {
 +            String indexName = indexId;
 +            Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                    dataset.getDatasetName(), indexName);
 +            if (secondaryIndex != null) {
 +                return new AqlIndex(secondaryIndex, dataset.getDataverseName(), dataset.getDatasetName(), this);
 +            } else {
 +                Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                        dataset.getDatasetName(), dataset.getDatasetName());
 +                if (primaryIndex.getIndexName().equals(indexId)) {
 +                    return new AqlIndex(primaryIndex, dataset.getDataverseName(), dataset.getDatasetName(), this);
 +                } else {
 +                    return null;
 +                }
 +            }
 +        } catch (MetadataException me) {
 +            throw new AlgebricksException(me);
 +        }
 +    }
 +
 +    public AqlDataSource lookupSourceInMetadata(AqlSourceId aqlId) throws AlgebricksException, MetadataException {
 +        Dataset dataset = findDataset(aqlId.getDataverseName(), aqlId.getDatasourceName());
 +        if (dataset == null) {
 +            throw new AlgebricksException("Datasource with id " + aqlId + " was not found.");
 +        }
 +        IAType itemType = findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
 +        IAType metaItemType = findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
 +        INodeDomain domain = findNodeDomain(dataset.getNodeGroupName());
 +        AqlDataSourceType datasourceType = dataset.getDatasetType().equals(DatasetType.EXTERNAL)
 +                ? AqlDataSourceType.EXTERNAL_DATASET : AqlDataSourceType.INTERNAL_DATASET;
 +        return new DatasetDataSource(aqlId, dataset, itemType, metaItemType, datasourceType,
 +                dataset.getDatasetDetails(), domain);
 +    }
 +
 +    @Override
 +    public boolean scannerOperatorIsLeaf(IDataSource<AqlSourceId> dataSource) {
 +        boolean result = false;
 +        switch (((AqlDataSource) dataSource).getDatasourceType()) {
 +            case INTERNAL_DATASET:
 +            case EXTERNAL_DATASET:
 +                result = ((DatasetDataSource) dataSource).getDataset().getDatasetType() == DatasetType.EXTERNAL;
 +                break;
 +            case FEED:
 +                result = true;
 +                break;
 +            case LOADABLE:
 +                result = true;
 +                break;
 +            default:
 +                break;
 +        }
 +        return result;
 +    }
 +
    /**
     * Builds the bulk-load operator that writes job results into a dataset's
     * primary LSM-BTree, along with the partition constraint derived from the
     * dataset's file splits.
     *
     * @param dataSource               target dataset
     * @param propagatedSchema         schema used to locate key/payload/filter variables
     * @param keys                     primary-key variables
     * @param payload                  record payload variable
     * @param additionalNonKeyFields   optional filter-field variables (at most one is read)
     * @param context                  job-gen context (providers, app context)
     * @param spec                     job specification the operator is added to
     * @throws AlgebricksException if the dataset is unknown or a metadata lookup fails
     */
    @Override
    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getWriteResultRuntime(
            IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, List<LogicalVariable> keys,
            LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields, JobGenContext context,
            JobSpecification spec) throws AlgebricksException {
        String dataverseName = dataSource.getId().getDataverseName();
        String datasetName = dataSource.getId().getDatasourceName();

        Dataset dataset = findDataset(dataverseName, datasetName);
        if (dataset == null) {
            throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
        }

        int numKeys = keys.size();
        // A dataset has at most one filter field; 0 or 1.
        int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;

        // move key fields to front
        // Field order in the permutation: [keys..., payload, optional filter field].
        int[] fieldPermutation = new int[numKeys + 1 + numFilterFields];
        int[] bloomFilterKeyFields = new int[numKeys];
        // System.arraycopy(keys, 0, fieldPermutation, 0, numKeys);
        int i = 0;
        for (LogicalVariable varKey : keys) {
            int idx = propagatedSchema.findVariable(varKey);
            fieldPermutation[i] = idx;
            // Bloom filter is built on the key positions 0..numKeys-1.
            bloomFilterKeyFields[i] = i;
            i++;
        }
        fieldPermutation[numKeys] = propagatedSchema.findVariable(payload);
        if (numFilterFields > 0) {
            // Only the first additional field is used as the filter field.
            int idx = propagatedSchema.findVariable(additionalNonKeyFields.get(0));
            fieldPermutation[numKeys + 1] = idx;
        }

        try {
            boolean temp = dataset.getDatasetDetails().isTemp();
            // A job stays "temporary-dataset-only" while every dataset it writes is temporary.
            isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;

            // The primary index is named after the dataset itself.
            Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
                    dataset.getDatasetName(), dataset.getDatasetName());
            String indexName = primaryIndex.getIndexName();

            String itemTypeName = dataset.getItemTypeName();
            ARecordType itemType = (ARecordType) MetadataManager.INSTANCE
                    .getDatatype(mdTxnCtx, dataset.getItemTypeDataverseName(), itemTypeName).getDatatype();
            // No meta record type is supplied here (third argument null).
            ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType, null);
            IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                    itemType, context.getBinaryComparatorFactoryProvider());

            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = splitProviderAndPartitionConstraintsForDataset(
                    dataSource.getId().getDataverseName(), datasetName, indexName, temp);
            IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();

            // Sizing hint for the bulk-loaded BTree / bloom filter.
            long numElementsHint = getCardinalityPerPartitionHint(dataset);

            ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
                    itemType, context.getBinaryComparatorFactoryProvider());
            int[] filterFields = DatasetUtils.createFilterFields(dataset);
            int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);

            // TODO
            // figure out the right behavior of the bulkload and then give the
            // right callback
            // (ex. what's the expected behavior when there is an error during
            // bulkload?)
            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
                    .getMergePolicyFactory(dataset, mdTxnCtx);
            // Note: durability flag is !temp — temporary datasets are non-durable.
            TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, null,
                    appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
                    splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields, fieldPermutation,
                    GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, numElementsHint, true,
                    new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
                            compactionInfo.first, compactionInfo.second,
                            new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
                            LSMBTreeIOOperationCallbackFactory.INSTANCE,
                            storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits,
                            filterCmpFactories, btreeFields, filterFields, !temp));
            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(btreeBulkLoad,
                    splitsAndConstraint.second);
        } catch (MetadataException me) {
            throw new AlgebricksException(me);
        }
    }
 +
    /**
     * Shared job-gen path for primary-index insert and delete (and bulk-load insert).
     * Builds the field permutation [keys..., payload, optional filter field,
     * additional non-filtering fields...], the transaction callbacks, and the
     * LSM-BTree dataflow helper, then emits either a bulk-load or an
     * insert/delete operator.
     *
     * @param indexOp                     INSERT or DELETE
     * @param bulkload                    when true, a TreeIndexBulkLoad operator is produced instead
     * @param additionalNonFilteringFields extra propagated fields (may be null)
     * @throws AlgebricksException if the dataset is unknown or metadata access fails
     */
    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertOrDeleteRuntime(IndexOperation indexOp,
            IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
            List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
            RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec, boolean bulkload,
            List<LogicalVariable> additionalNonFilteringFields) throws AlgebricksException {

        String datasetName = dataSource.getId().getDatasourceName();
        Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName);
        if (dataset == null) {
            throw new AlgebricksException(
                    "Unknown dataset " + datasetName + " in dataverse " + dataSource.getId().getDataverseName());
        }
        boolean temp = dataset.getDatasetDetails().isTemp();
        // A job stays "temporary-dataset-only" while every dataset it writes is temporary.
        isTemporaryDatasetWriteJob = isTemporaryDatasetWriteJob && temp;

        int numKeys = keys.size();
        // A dataset has at most one filter field; 0 or 1.
        int numFilterFields = DatasetUtils.getFilterField(dataset) == null ? 0 : 1;
        // Move key fields to front.
        int[] fieldPermutation = new int[numKeys + 1 + numFilterFields
                + (additionalNonFilteringFields == null ? 0 : additionalNonFilteringFields.size())];
        int[] bloomFilterKeyFields = new int[numKeys];
        int i = 0;
        for (LogicalVariable varKey : keys) {
            int idx = propagatedSchema.findVariable(varKey);
            fieldPermutation[i] = idx;
            // Bloom filter is built on the key positions 0..numKeys-1.
            bloomFilterKeyFields[i] = i;
            i++;
        }
        // Payload follows the keys, then the optional filter field, then any extras.
        fieldPermutation[i++] = propagatedSchema.findVariable(payload);
        if (numFilterFields > 0) {
            int idx = propagatedSchema.findVariable(additionalNonKeyFields.get(0));
            fieldPermutation[i++] = idx;
        }
        if (additionalNonFilteringFields != null) {
            for (LogicalVariable variable : additionalNonFilteringFields) {
                int idx = propagatedSchema.findVariable(variable);
                fieldPermutation[i++] = idx;
            }
        }

        try {
            // The primary index is named after the dataset itself.
            Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
                    dataset.getDatasetName(), dataset.getDatasetName());
            String indexName = primaryIndex.getIndexName();
            ARecordType itemType = (ARecordType) MetadataManager.INSTANCE
                    .getDatatype(mdTxnCtx, dataset.getItemTypeDataverseName(), dataset.getItemTypeName()).getDatatype();
            // Meta record type participates in the tuple type traits (may be null for plain datasets).
            ARecordType metaItemType = DatasetUtils.getMetaType(this, dataset);
            ITypeTraits[] typeTraits = DatasetUtils.computeTupleTypeTraits(dataset, itemType, metaItemType);

            IAsterixApplicationContextInfo appContext = (IAsterixApplicationContextInfo) context.getAppContext();
            IBinaryComparatorFactory[] comparatorFactories = DatasetUtils.computeKeysBinaryComparatorFactories(dataset,
                    itemType, context.getBinaryComparatorFactoryProvider());
            Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = splitProviderAndPartitionConstraintsForDataset(
                    dataSource.getId().getDataverseName(), datasetName, indexName, temp);

            // prepare callback
            JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
            int datasetId = dataset.getDatasetId();
            int[] primaryKeyFields = new int[numKeys];
            for (i = 0; i < numKeys; i++) {
                primaryKeyFields[i] = i;
            }

            ITypeTraits[] filterTypeTraits = DatasetUtils.computeFilterTypeTraits(dataset, itemType);
            IBinaryComparatorFactory[] filterCmpFactories = DatasetUtils.computeFilterBinaryComparatorFactories(dataset,
                    itemType, context.getBinaryComparatorFactoryProvider());
            int[] filterFields = DatasetUtils.createFilterFields(dataset);
            int[] btreeFields = DatasetUtils.createBTreeFieldsWhenThereisAFilter(dataset);

            TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
            // Temporary datasets get the non-durable callback variant.
            IModificationOperationCallbackFactory modificationCallbackFactory = temp
                    ? new TempDatasetPrimaryIndexModificationOperationCallbackFactory(jobId, datasetId,
                            primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE)
                    : new PrimaryIndexModificationOperationCallbackFactory(jobId, datasetId, primaryKeyFields,
                            txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);

            Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils
                    .getMergePolicyFactory(dataset, mdTxnCtx);
            // Note: durability flag is !temp — temporary datasets are non-durable.
            IIndexDataflowHelperFactory idfh = new LSMBTreeDataflowHelperFactory(
                    new AsterixVirtualBufferCacheProvider(datasetId), compactionInfo.first, compactionInfo.second,
                    new PrimaryIndexOperationTrackerProvider(dataset.getDatasetId()),
                    AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
                    storageProperties.getBloomFilterFalsePositiveRate(), true, filterTypeTraits, filterCmpFactories,
                    btreeFields, filterFields, !temp);
            IOperatorDescriptor op;
            if (bulkload) {
                long numElementsHint = getCardinalityPerPartitionHint(dataset);
                op = new TreeIndexBulkLoadOperatorDescriptor(spec, recordDesc, appContext.getStorageManagerInterface(),
                        appContext.getIndexLifecycleManagerProvider(), splitsAndConstraint.first, typeTraits,
                        comparatorFactories, bloomFilterKeyFields, fieldPermutation,
                        GlobalConfig.DEFAULT_TREE_FILL_FACTOR, true, numElementsHint, true, idfh);
            } else {
                op = new AsterixLSMTreeInsertDeleteOperatorDescriptor(spec, recordDesc,
                        appContext.getStorageManagerInterface(), appContext.getIndexLifecycleManagerProvider(),
                        splitsAndConstraint.first, typeTraits, comparatorFactories, bloomFilterKeyFields,
                        fieldPermutation, indexOp, idfh, null, true, indexName, null, modificationCallbackFactory,
                        NoOpOperationCallbackFactory.INSTANCE);
            }
            return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(op, splitsAndConstraint.second);

        } catch (MetadataException me) {
            throw new AlgebricksException(me);
        }
    }
 +
 +    @Override
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getInsertRuntime(
 +            IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
 +            List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
 +            List<LogicalVariable> additionalNonFilteringFields, RecordDescriptor recordDesc, JobGenContext context,
 +            JobSpecification spec, boolean bulkload) throws AlgebricksException {
 +        return getInsertOrDeleteRuntime(IndexOperation.INSERT, dataSource, propagatedSchema, typeEnv, keys, payload,
 +                additionalNonKeyFields, recordDesc, context, spec, bulkload, additionalNonFilteringFields);
 +    }
 +
 +    @Override
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getDeleteRuntime(
 +            IDataSource<AqlSourceId> dataSource, IOperatorSchema propagatedSchema, IVariableTypeEnvironment typeEnv,
 +            List<LogicalVariable> keys, LogicalVariable payload, List<LogicalVariable> additionalNonKeyFields,
 +            RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec) throws AlgebricksException {
 +        return getInsertOrDeleteRuntime(IndexOperation.DELETE, dataSource, propagatedSchema, typeEnv, keys, payload,
 +                additionalNonKeyFields, recordDesc, context, spec, false, null);
 +    }
 +
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertOrDeleteRuntime(
 +            IndexOperation indexOp, IDataSourceIndex<String, AqlSourceId> dataSourceIndex,
 +            IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv,
 +            List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
 +            List<LogicalVariable> additionalNonKeyFields, ILogicalExpression filterExpr, RecordDescriptor recordDesc,
 +            JobGenContext context, JobSpecification spec, boolean bulkload) throws AlgebricksException {
 +        String indexName = dataSourceIndex.getId();
 +        String dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
 +        String datasetName = dataSourceIndex.getDataSource().getId().getDatasourceName();
 +
 +        Dataset dataset = findDataset(dataverseName, datasetName);
 +        if (dataset == null) {
 +            throw new AlgebricksException("Unknown dataset " + datasetName);
 +        }
 +        Index secondaryIndex;
 +        try {
 +            secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                    dataset.getDatasetName(), indexName);
 +        } catch (MetadataException e) {
 +            throw new AlgebricksException(e);
 +        }
 +        AsterixTupleFilterFactory filterFactory = createTupleFilterFactory(inputSchemas, typeEnv, filterExpr, context);
 +        switch (secondaryIndex.getIndexType()) {
 +            case BTREE: {
 +                return getBTreeDmlRuntime(dataverseName, datasetName, indexName, propagatedSchema, typeEnv, primaryKeys,
 +                        secondaryKeys, additionalNonKeyFields, filterFactory, recordDesc, context, spec, indexOp,
 +                        bulkload);
 +            }
 +            case RTREE: {
 +                return getRTreeDmlRuntime(dataverseName, datasetName, indexName, propagatedSchema, typeEnv, primaryKeys,
 +                        secondaryKeys, additionalNonKeyFields, filterFactory, recordDesc, context, spec, indexOp,
 +                        bulkload);
 +            }
 +            case SINGLE_PARTITION_WORD_INVIX:
 +            case SINGLE_PARTITION_NGRAM_INVIX:
 +            case LENGTH_PARTITIONED_WORD_INVIX:
 +            case LENGTH_PARTITIONED_NGRAM_INVIX: {
 +                return getInvertedIndexDmlRuntime(dataverseName, datasetName, indexName, propagatedSchema, typeEnv,
 +                        primaryKeys, secondaryKeys, additionalNonKeyFields, filterFactory, recordDesc, context, spec,
 +                        indexOp, secondaryIndex.getIndexType(), bulkload);
 +            }
 +            default: {
 +                throw new AlgebricksException(
 +                        "Insert and delete not implemented for index type: " + secondaryIndex.getIndexType());
 +            }
 +        }
 +    }
 +
 +    @Override
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getIndexInsertRuntime(
 +            IDataSourceIndex<String, AqlSourceId> dataSourceIndex, IOperatorSchema propagatedSchema,
 +            IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
 +            List<LogicalVariable> secondaryKeys, List<LogicalVariable> additionalNonKeyFields,
 +            ILogicalExpression filterExpr, RecordDescriptor recordDesc, JobGenContext context, JobSpecification spec,
 +            boolean bulkload) throws AlgebricksException {
 +        return getIndexInsertOrDeleteRuntime(IndexOperation.INSERT, dataSourceIndex, propagatedSchema, inputSchemas,
 +                typeEnv, primaryKeys, secondaryKeys, additionalNonKeyFields, filterExpr, recordDesc, context, spec,
 +                bulkload);
 +    }
 +
 +    @Override
 +    public Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getTokenizerRuntime(
 +            IDataSourceIndex<String, AqlSourceId> dataSourceIndex, IOperatorSchema propagatedSchema,
 +            IOperatorSchema[] inputSchemas, IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys,
 +            List<LogicalVariable> secondaryKeys, ILogicalExpression filterExpr, RecordDescriptor recordDesc,
 +            JobGenContext context, JobSpecification spec, boolean bulkload) throws AlgebricksException {
 +
 +        String indexName = dataSourceIndex.getId();
 +        String dataverseName = dataSourceIndex.getDataSource().getId().getDataverseName();
 +        String datasetName = dataSourceIndex.getDataSource().getId().getDatasourceName();
 +
 +        IOperatorSchema inputSchema = new OperatorSchemaImpl();
 +        if (inputSchemas.length > 0) {
 +            inputSchema = inputSchemas[0];
 +        } else {
 +            throw new AlgebricksException("TokenizeOperator can not operate without any input variable.");
 +        }
 +
 +        Dataset dataset = findDataset(dataverseName, datasetName);
 +        if (dataset == null) {
 +            throw new AlgebricksException("Unknown dataset " + datasetName);
 +        }
 +        Index secondaryIndex;
 +        try {
 +            secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                    dataset.getDatasetName(), indexName);
 +        } catch (MetadataException e) {
 +            throw new AlgebricksException(e);
 +        }
 +        AsterixTupleFilterFactory filterFactory = createTupleFilterFactory(inputSchemas, typeEnv, filterExpr, context);
 +        // TokenizeOperator only supports a keyword or n-gram index.
 +        switch (secondaryIndex.getIndexType()) {
 +            case SINGLE_PARTITION_WORD_INVIX:
 +            case SINGLE_PARTITION_NGRAM_INVIX:
 +            case LENGTH_PARTITIONED_WORD_INVIX:
 +            case LENGTH_PARTITIONED_NGRAM_INVIX: {
 +                return getBinaryTokenizerRuntime(dataverseName, datasetName, indexName, inputSchema, propagatedSchema,
 +                        typeEnv, primaryKeys, secondaryKeys, filterFactory, recordDesc, context, spec,
 +                        IndexOperation.INSERT, secondaryIndex.getIndexType(), bulkload);
 +            }
 +            default: {
 +                throw new AlgebricksException("Currently, we do not support TokenizeOperator for the index type: "
 +                        + secondaryIndex.getIndexType());
 +            }
 +        }
 +
 +    }
 +
 +    // Get a Tokenizer for the bulk-loading data into a n-gram or keyword index.
 +    private Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> getBinaryTokenizerRuntime(String dataverseName,
 +            String datasetName, String indexName, IOperatorSchema inputSchema, IOperatorSchema propagatedSchema,
 +            IVariableTypeEnvironment typeEnv, List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
 +            AsterixTupleFilterFactory filterFactory, RecordDescriptor recordDesc, JobGenContext context,
 +            JobSpecification spec, IndexOperation indexOp, IndexType indexType, boolean bulkload)
 +            throws AlgebricksException {
 +
 +        // Sanity checks.
 +        if (primaryKeys.size() > 1) {
 +            throw new AlgebricksException("Cannot tokenize composite primary key.");
 +        }
 +        if (secondaryKeys.size() > 1) {
 +            throw new AlgebricksException("Cannot tokenize composite secondary key fields.");
 +        }
 +
 +        boolean isPartitioned;
 +        if (indexType == IndexType.LENGTH_PARTITIONED_WORD_INVIX
 +                || indexType == IndexType.LENGTH_PARTITIONED_NGRAM_INVIX) {
 +            isPartitioned = true;
 +        } else {
 +            isPartitioned = false;
 +        }
 +
 +        // Number of Keys that needs to be propagated
 +        int numKeys = inputSchema.getSize();
 +
 +        // Get the rest of Logical Variables that are not (PK or SK) and each
 +        // variable's positions.
 +        // These variables will be propagated through TokenizeOperator.
 +        List<LogicalVariable> otherKeys = new ArrayList<LogicalVariable>();
 +        if (inputSchema.getSize() > 0) {
 +            for (int k = 0; k < inputSchema.getSize(); k++) {
 +                boolean found = false;
 +                for (LogicalVariable varKey : primaryKeys) {
 +                    if (varKey.equals(inputSchema.getVariable(k))) {
 +                        found = true;
 +                        break;
 +                    } else {
 +                        found = false;
 +                    }
 +                }
 +                if (!found) {
 +                    for (LogicalVariable varKey : secondaryKeys) {
 +                        if (varKey.equals(inputSchema.getVariable(k))) {
 +                            found = true;
 +                            break;
 +                        } else {
 +                            found = false;
 +                        }
 +                    }
 +                }
 +                if (!found) {
 +                    otherKeys.add(inputSchema.getVariable(k));
 +                }
 +            }
 +        }
 +
 +        // For tokenization, sorting and loading.
 +        // One token (+ optional partitioning field) + primary keys + secondary
 +        // keys + other variables
 +        // secondary keys and other variables will be just passed to the
 +        // IndexInsertDelete Operator.
 +        int numTokenKeyPairFields = (!isPartitioned) ? 1 + numKeys : 2 + numKeys;
 +
 +        // generate field permutations for the input
 +        int[] fieldPermutation = new int[numKeys];
 +
 +        int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
 +        int i = 0;
 +        int j = 0;
 +        for (LogicalVariable varKey : primaryKeys) {
 +            int idx = propagatedSchema.findVariable(varKey);
 +            fieldPermutation[i] = idx;
 +            modificationCallbackPrimaryKeyFields[j] = i;
 +            i++;
 +            j++;
 +        }
 +        for (LogicalVariable varKey : otherKeys) {
 +            int idx = propagatedSchema.findVariable(varKey);
 +            fieldPermutation[i] = idx;
 +            i++;
 +        }
 +        for (LogicalVariable varKey : secondaryKeys) {
 +            int idx = propagatedSchema.findVariable(varKey);
 +            fieldPermutation[i] = idx;
 +            i++;
 +        }
 +
 +        Dataset dataset = findDataset(dataverseName, datasetName);
 +        if (dataset == null) {
 +            throw new AlgebricksException("Unknown dataset " + datasetName + " in dataverse " + dataverseName);
 +        }
 +        String itemTypeName = dataset.getItemTypeName();
 +        IAType itemType;
 +        try {
 +            itemType = MetadataManager.INSTANCE.getDatatype(mdTxnCtx, dataset.getItemTypeDataverseName(), itemTypeName)
 +                    .getDatatype();
 +
 +            if (itemType.getTypeTag() != ATypeTag.RECORD) {
 +                throw new AlgebricksException("Only record types can be tokenized.");
 +            }
 +
 +            ARecordType recType = (ARecordType) itemType;
 +
 +            // Index parameters.
 +            Index secondaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
 +                    dataset.getDatasetName(), indexName);
 +
 +            List<List<String>> secondaryKeyExprs = secondaryIndex.getKeyFieldNames();
 +            List<IAType> secondaryKeyTypeEntries = secondaryIndex.getKeyFieldTypes();
 +
 +            int numTokenFields = (!isPartitioned) ? secondaryKeys.size() : secondaryKeys.size() + 1;
 +            ITypeTraits[] tokenTypeTraits = new ITypeTraits[numTokenFields];
 +            ITypeTraits[] invListsTypeTraits = new ITypeTraits[primaryKeys.size()];
 +
 +            // Find the key type of the secondary key. If it's a derived type,
 +            // return the derived type.
 +            // e.g. UNORDERED LIST -> return UNORDERED LIST type
 +            IAType secondaryKeyType = null;
 +            Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypeEntries.get(0),
 +                    secondaryKeyExprs.get(0), recType);
 +            secondaryKeyType = keyPairType.first;
 +            List<List<String>> partitioningKeys = DatasetUtils.getPartitioningKeys(dataset);
 +            i = 0;
 +            for (List<String> partitioningKey : partitioningKeys) {
 +                IAType keyType = recType.getSubFieldType(partitioningKey);
 +                invListsTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(keyType);
 +                ++i;
 +            }
 +
 +            tokenTypeTraits[0] = NonTaggedFormatUtil.getTokenTypeTrait(secondaryKeyType);
 +            if (isPartitioned) {
 +                // The partitioning field is hardcoded to be a short *without*
 +                // an Asterix type tag.
 +                tokenTypeTraits[1] = ShortPointable.TYPE_TRAITS;
 +            }
 +
 +            IBinaryTokenizerFactory tokenizerFactory = NonTaggedFormatUtil.getBinaryTokenizerFactory(
 +                    secondaryKeyType.getTypeTag(), indexType, secondaryIndex.getGramLength());
 +
 +            P

<TRUNCATED>


[03/50] [abbrv] incubator-asterixdb git commit: Exclude Temporary Indexes From Replication

Posted by im...@apache.org.
Exclude Temporary Indexes From Replication

- Exclude non-durable LSM indexes from replication.
- Stop the heartbeat task after the NC has stopped to avoid false failure detection.

Change-Id: Icce91a203e04cb068a7a5aa541720bbd0289eacb
Reviewed-on: https://asterix-gerrit.ics.uci.edu/777
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: abdullah alamoudi <ba...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/08aa051a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/08aa051a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/08aa051a

Branch: refs/heads/master
Commit: 08aa051a34d5c2b757cc0e0b2e018725e164d0f7
Parents: b2e9d08
Author: Murtadha Hubail <mh...@uci.edu>
Authored: Sat Apr 2 20:55:44 2016 -0700
Committer: Murtadha Hubail <hu...@gmail.com>
Committed: Sun Apr 3 00:18:32 2016 -0700

----------------------------------------------------------------------
 .../control/nc/NodeControllerService.java       |  9 +++++--
 .../storage/am/lsm/common/api/ILSMIndex.java    |  6 +++++
 .../am/lsm/common/impls/AbstractLSMIndex.java   | 25 +++++++++++---------
 .../storage/am/lsm/common/impls/LSMHarness.java |  3 ++-
 4 files changed, 29 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/08aa051a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index 7b5758c..598d6db 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -338,12 +338,17 @@ public class NodeControllerService implements IControllerService {
             }
             partitionManager.close();
             datasetPartitionManager.close();
-            heartbeatTask.cancel();
             netManager.stop();
             datasetNetworkManager.stop();
             queue.stop();
-            if (ncAppEntryPoint != null)
+            if (ncAppEntryPoint != null) {
                 ncAppEntryPoint.stop();
+            }
+            /**
+             * Stop heartbeat after NC has stopped to avoid false node failure detection
+             * on CC if an NC takes a long time to stop.
+             */
+            heartbeatTask.cancel();
             LOGGER.log(Level.INFO, "Stopped NodeControllerService");
             shuttedDown = true;
         }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/08aa051a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
index c1cef2d..11b933d 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
@@ -39,6 +39,7 @@ public interface ILSMIndex extends IIndex {
 
     public void deactivate(boolean flushOnExit) throws HyracksDataException;
 
+    @Override
     public ILSMIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
             ISearchOperationCallback searchCallback) throws HyracksDataException;
 
@@ -51,4 +52,9 @@ public interface ILSMIndex extends IIndex {
     public List<ILSMComponent> getImmutableComponents();
 
     public boolean isPrimaryIndex();
+
+    /**
+     * @return true if the index is durable. Otherwise false.
+     */
+    public boolean isDurable();
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/08aa051a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
index 441dda1..440ad31 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
@@ -33,7 +33,6 @@ import org.apache.hyracks.api.replication.IReplicationJob.ReplicationExecutionTy
 import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
 import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilter;
 import org.apache.hyracks.storage.am.common.api.ITreeIndex;
-import org.apache.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
@@ -48,8 +47,6 @@ import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
 import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
 import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
-import org.apache.hyracks.storage.common.buffercache.ICachedPage;
-import org.apache.hyracks.storage.common.file.BufferedFileHandle;
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
 
 public abstract class AbstractLSMIndex implements ILSMIndexInternal {
@@ -80,11 +77,10 @@ public abstract class AbstractLSMIndex implements ILSMIndexInternal {
     protected boolean memoryComponentsAllocated = false;
 
     public AbstractLSMIndex(List<IVirtualBufferCache> virtualBufferCaches, IBufferCache diskBufferCache,
-            ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider,
-            double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
-            ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
-            ILSMComponentFilterFrameFactory filterFrameFactory, LSMComponentFilterManager filterManager,
-            int[] filterFields, boolean durable) {
+            ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider, double bloomFilterFalsePositiveRate,
+            ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
+            ILSMIOOperationCallback ioOpCallback, ILSMComponentFilterFrameFactory filterFrameFactory,
+            LSMComponentFilterManager filterManager, int[] filterFields, boolean durable) {
         this.virtualBufferCaches = virtualBufferCaches;
         this.diskBufferCache = diskBufferCache;
         this.diskFileMapProvider = diskFileMapProvider;
@@ -149,8 +145,8 @@ public abstract class AbstractLSMIndex implements ILSMIndexInternal {
     }
 
     protected void markAsValidInternal(IBufferCache bufferCache, BloomFilter filter) throws HyracksDataException {
-        if(durable){
-            bufferCache.force(filter.getFileId(),true);
+        if (durable) {
+            bufferCache.force(filter.getFileId(), true);
         }
     }
 
@@ -208,7 +204,7 @@ public abstract class AbstractLSMIndex implements ILSMIndexInternal {
         return diskBufferCache;
     }
 
-    public boolean isEmptyIndex() throws HyracksDataException {
+    public boolean isEmptyIndex() {
         boolean isModified = false;
         for (ILSMComponent c : memoryComponents) {
             AbstractMemoryLSMComponent mutableComponent = (AbstractMemoryLSMComponent) c;
@@ -288,9 +284,16 @@ public abstract class AbstractLSMIndex implements ILSMIndexInternal {
         }
     }
 
+    @Override
     public abstract void allocateMemoryComponents() throws HyracksDataException;
 
+    @Override
     public boolean isMemoryComponentsAllocated() {
         return memoryComponentsAllocated;
     }
+
+    @Override
+    public boolean isDurable() {
+        return durable;
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/08aa051a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 21b0d8a..0224c5c 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -63,7 +63,8 @@ public class LSMHarness implements ILSMHarness {
         this.opTracker = opTracker;
         this.mergePolicy = mergePolicy;
         fullMergeIsRequested = new AtomicBoolean();
-        this.replicationEnabled = replicationEnabled;
+        //only durable indexes are replicated
+        this.replicationEnabled = replicationEnabled && lsmIndex.isDurable();
         if (replicationEnabled) {
             this.componentsToBeReplicated = new ArrayList<ILSMComponent>();
         }


[35/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 49e9399,0000000..9cbc88a
mode 100644,000000..100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@@ -1,1514 -1,0 +1,1534 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.translator;
 +
 +import java.io.File;
 +import java.io.IOException;
 +import java.util.ArrayList;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.Iterator;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Set;
 +import java.util.concurrent.atomic.AtomicLong;
 +
 +import org.apache.asterix.algebra.base.ILangExpressionToPlanTranslator;
 +import org.apache.asterix.common.config.AsterixMetadataProperties;
 +import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.common.functions.FunctionConstants;
 +import org.apache.asterix.common.functions.FunctionSignature;
 +import org.apache.asterix.lang.aql.util.RangeMapBuilder;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.base.Expression.Kind;
 +import org.apache.asterix.lang.common.clause.GroupbyClause;
 +import org.apache.asterix.lang.common.clause.LetClause;
 +import org.apache.asterix.lang.common.clause.LimitClause;
 +import org.apache.asterix.lang.common.clause.OrderbyClause;
 +import org.apache.asterix.lang.common.clause.OrderbyClause.OrderModifier;
 +import org.apache.asterix.lang.common.clause.WhereClause;
 +import org.apache.asterix.lang.common.expression.CallExpr;
 +import org.apache.asterix.lang.common.expression.FieldAccessor;
 +import org.apache.asterix.lang.common.expression.FieldBinding;
 +import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
 +import org.apache.asterix.lang.common.expression.IfExpr;
 +import org.apache.asterix.lang.common.expression.IndexAccessor;
 +import org.apache.asterix.lang.common.expression.ListConstructor;
 +import org.apache.asterix.lang.common.expression.ListConstructor.Type;
 +import org.apache.asterix.lang.common.expression.LiteralExpr;
 +import org.apache.asterix.lang.common.expression.OperatorExpr;
 +import org.apache.asterix.lang.common.expression.QuantifiedExpression;
 +import org.apache.asterix.lang.common.expression.QuantifiedExpression.Quantifier;
 +import org.apache.asterix.lang.common.expression.RecordConstructor;
 +import org.apache.asterix.lang.common.expression.UnaryExpr;
 +import org.apache.asterix.lang.common.expression.UnaryExpr.Sign;
 +import org.apache.asterix.lang.common.expression.VariableExpr;
 +import org.apache.asterix.lang.common.literal.StringLiteral;
 +import org.apache.asterix.lang.common.statement.FunctionDecl;
 +import org.apache.asterix.lang.common.statement.Query;
 +import org.apache.asterix.lang.common.struct.Identifier;
 +import org.apache.asterix.lang.common.struct.OperatorType;
 +import org.apache.asterix.lang.common.struct.QuantifiedPair;
 +import org.apache.asterix.lang.common.util.FunctionUtil;
 +import org.apache.asterix.lang.common.visitor.base.AbstractQueryExpressionVisitor;
 +import org.apache.asterix.metadata.MetadataException;
 +import org.apache.asterix.metadata.MetadataManager;
 +import org.apache.asterix.metadata.declared.AqlDataSource.AqlDataSourceType;
 +import org.apache.asterix.metadata.declared.AqlMetadataProvider;
 +import org.apache.asterix.metadata.declared.AqlSourceId;
 +import org.apache.asterix.metadata.declared.DatasetDataSource;
 +import org.apache.asterix.metadata.declared.LoadableDataSource;
 +import org.apache.asterix.metadata.declared.ResultSetDataSink;
 +import org.apache.asterix.metadata.declared.ResultSetSinkId;
 +import org.apache.asterix.metadata.entities.Dataset;
 +import org.apache.asterix.metadata.entities.Function;
 +import org.apache.asterix.metadata.entities.InternalDatasetDetails;
 +import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
 +import org.apache.asterix.metadata.functions.ExternalFunctionCompilerUtil;
 +import org.apache.asterix.metadata.utils.DatasetUtils;
 +import org.apache.asterix.om.base.AString;
 +import org.apache.asterix.om.constants.AsterixConstantValue;
 +import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
 +import org.apache.asterix.om.functions.AsterixFunctionInfo;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.asterix.om.types.IAType;
 +import org.apache.asterix.om.util.AsterixAppContextInfo;
 +import org.apache.asterix.runtime.formats.FormatUtils;
 +import org.apache.asterix.translator.CompiledStatements.CompiledLoadFromFileStatement;
 +import org.apache.asterix.translator.CompiledStatements.CompiledSubscribeFeedStatement;
 +import org.apache.asterix.translator.CompiledStatements.ICompiledDmlStatement;
 +import org.apache.asterix.translator.util.PlanTranslationUtil;
 +import org.apache.commons.lang3.mutable.Mutable;
 +import org.apache.commons.lang3.mutable.MutableObject;
 +import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
 +import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +import org.apache.hyracks.algebricks.core.algebra.base.Counter;
 +import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
 +import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
 +import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
 +import org.apache.hyracks.algebricks.core.algebra.base.LogicalExpressionTag;
 +import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
 +import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
 +import org.apache.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression.FunctionKind;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.BroadcastExpressionAnnotation.BroadcastSide;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.IExpressionAnnotation;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
 +import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
 +import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
 +import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
 +import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteUpsertOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.SinkOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.LogicalOperatorDeepCopyWithNewVariablesVisitor;
 +import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
 +import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
 +import org.apache.hyracks.algebricks.core.algebra.properties.INodeDomain;
 +import org.apache.hyracks.algebricks.core.algebra.properties.LocalOrderProperty;
 +import org.apache.hyracks.algebricks.core.algebra.properties.OrderColumn;
 +import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
 +import org.apache.hyracks.api.io.FileReference;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +
 +/**
 + * Each visit returns a pair of an operator and a variable. The variable
 + * corresponds to the new column, if any, added to the tuple flow. E.g., for
 + * Unnest, the column is the variable bound to the elements in the list, for
 + * Subplan it is null. The first argument of a visit method is the expression
 + * which is translated. The second argument of a visit method is the tuple
 + * source for the current subtree.
 + */
 +
 +class LangExpressionToPlanTranslator
 +        extends AbstractQueryExpressionVisitor<Pair<ILogicalOperator, LogicalVariable>, Mutable<ILogicalOperator>>
 +        implements ILangExpressionToPlanTranslator {
 +
 +    protected final AqlMetadataProvider metadataProvider;
 +    protected final TranslationContext context;
 +    private static final AtomicLong outputFileID = new AtomicLong(0);
 +    private static final String OUTPUT_FILE_PREFIX = "OUTPUT_";
 +
 +    public LangExpressionToPlanTranslator(AqlMetadataProvider metadataProvider, int currentVarCounter)
 +            throws AlgebricksException {
 +        this.context = new TranslationContext(new Counter(currentVarCounter));
 +        this.metadataProvider = metadataProvider;
 +        FormatUtils.getDefaultFormat().registerRuntimeFunctions();
 +    }
 +
 +    @Override
 +    public int getVarCounter() {
 +        return context.getVarCounter();
 +    }
 +
 +    @Override
 +    public ILogicalPlan translateLoad(ICompiledDmlStatement stmt) throws AlgebricksException {
 +        CompiledLoadFromFileStatement clffs = (CompiledLoadFromFileStatement) stmt;
 +        Dataset dataset = metadataProvider.findDataset(clffs.getDataverseName(), clffs.getDatasetName());
 +        if (dataset == null) {
 +            // This would never happen since we check for this in AqlTranslator
 +            throw new AlgebricksException(
 +                    "Unable to load dataset " + clffs.getDatasetName() + " since it does not exist");
 +        }
 +        IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
 +        IAType metaItemType = metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
 +                dataset.getMetaItemTypeName());
 +        DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
 +                stmt.getDatasetName());
 +        List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
 +        if (dataset.hasMetaPart()) {
 +            throw new AlgebricksException(
 +                    dataset.getDatasetName() + ": load dataset is not supported on Datasets with Meta records");
 +        }
 +
 +        LoadableDataSource lds;
 +        try {
 +            lds = new LoadableDataSource(dataset, itemType, metaItemType, clffs.getAdapter(), clffs.getProperties());
 +        } catch (IOException e) {
 +            throw new AlgebricksException(e);
 +        }
 +
 +        // etsOp is a dummy input operator used to keep the compiler happy. it
 +        // could be removed but would result in
 +        // the need to fix many rewrite rules that assume that datasourcescan
 +        // operators always have input.
 +        ILogicalOperator etsOp = new EmptyTupleSourceOperator();
 +
 +        // Add a logical variable for the record.
 +        List<LogicalVariable> payloadVars = new ArrayList<LogicalVariable>();
 +        payloadVars.add(context.newVar());
 +
 +        // Create a scan operator and make the empty tuple source its input
 +        DataSourceScanOperator dssOp = new DataSourceScanOperator(payloadVars, lds);
 +        dssOp.getInputs().add(new MutableObject<ILogicalOperator>(etsOp));
 +        ILogicalExpression payloadExpr = new VariableReferenceExpression(payloadVars.get(0));
 +        Mutable<ILogicalExpression> payloadRef = new MutableObject<ILogicalExpression>(payloadExpr);
 +
 +        // Creating the assign to extract the PK out of the record
 +        ArrayList<LogicalVariable> pkVars = new ArrayList<LogicalVariable>();
 +        ArrayList<Mutable<ILogicalExpression>> pkExprs = new ArrayList<Mutable<ILogicalExpression>>();
 +        List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
 +        LogicalVariable payloadVar = payloadVars.get(0);
 +        for (List<String> keyFieldName : partitionKeys) {
 +            PlanTranslationUtil.prepareVarAndExpression(keyFieldName, payloadVar, pkVars, pkExprs, varRefsForLoading,
 +                    context);
 +        }
 +
 +        AssignOperator assign = new AssignOperator(pkVars, pkExprs);
 +        assign.getInputs().add(new MutableObject<ILogicalOperator>(dssOp));
 +
 +        // If the input is pre-sorted, we set the ordering property explicitly in the assign
 +        if (clffs.alreadySorted()) {
 +            List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
 +            for (int i = 0; i < pkVars.size(); ++i) {
 +                orderColumns.add(new OrderColumn(pkVars.get(i), OrderKind.ASC));
 +            }
 +            assign.setExplicitOrderingProperty(new LocalOrderProperty(orderColumns));
 +        }
 +
 +        List<String> additionalFilteringField = DatasetUtils.getFilterField(targetDatasource.getDataset());
 +        List<LogicalVariable> additionalFilteringVars = null;
 +        List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
 +        List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
 +        AssignOperator additionalFilteringAssign = null;
 +        if (additionalFilteringField != null) {
 +            additionalFilteringVars = new ArrayList<LogicalVariable>();
 +            additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
 +            additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
 +            PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, payloadVar, additionalFilteringVars,
 +                    additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
 +            additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
 +                    additionalFilteringAssignExpressions);
 +        }
 +
 +        InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, payloadRef,
 +                varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, true);
 +        insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
 +
 +        if (additionalFilteringAssign != null) {
 +            additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(assign));
 +            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
 +        } else {
 +            insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
 +        }
 +
 +        ILogicalOperator leafOperator = new SinkOperator();
 +        leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
 +        return new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(leafOperator));
 +    }
 +
 +    @SuppressWarnings("unchecked")
 +    @Override
 +    public ILogicalPlan translate(Query expr, String outputDatasetName, ICompiledDmlStatement stmt)
 +            throws AlgebricksException, AsterixException {
 +        Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this,
 +                new MutableObject<ILogicalOperator>(new EmptyTupleSourceOperator()));
 +        ArrayList<Mutable<ILogicalOperator>> globalPlanRoots = new ArrayList<Mutable<ILogicalOperator>>();
 +        ILogicalOperator topOp = p.first;
 +        ProjectOperator project = (ProjectOperator) topOp;
 +        LogicalVariable unnestVar = project.getVariables().get(0);
 +        LogicalVariable resVar = project.getVariables().get(0);
 +
 +        if (outputDatasetName == null) {
 +            FileSplit outputFileSplit = metadataProvider.getOutputFile();
 +            if (outputFileSplit == null) {
 +                outputFileSplit = getDefaultOutputFileLocation();
 +            }
 +            metadataProvider.setOutputFile(outputFileSplit);
 +
 +            List<Mutable<ILogicalExpression>> writeExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
 +            writeExprList.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
 +            ResultSetSinkId rssId = new ResultSetSinkId(metadataProvider.getResultSetId());
 +            ResultSetDataSink sink = new ResultSetDataSink(rssId, null);
 +            topOp = new DistributeResultOperator(writeExprList, sink);
 +            topOp.getInputs().add(new MutableObject<ILogicalOperator>(project));
 +
 +            // Retrieve the Output RecordType (if any) and store it on
 +            // the DistributeResultOperator
 +            IAType outputRecordType = metadataProvider.findOutputRecordType();
 +            if (outputRecordType != null) {
 +                topOp.getAnnotations().put("output-record-type", outputRecordType);
 +            }
 +        } else {
 +            /**
 +             * add the collection-to-sequence right before the project,
 +             * because dataset only accept non-collection records
 +             */
 +            LogicalVariable seqVar = context.newVar();
 +            /** This assign adds a marker function collection-to-sequence: if the input is a singleton collection, unnest it; otherwise do nothing. */
 +            AssignOperator assignCollectionToSequence = new AssignOperator(seqVar,
 +                    new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
 +                            FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.COLLECTION_TO_SEQUENCE),
 +                            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)))));
 +            assignCollectionToSequence.getInputs()
 +                    .add(new MutableObject<ILogicalOperator>(project.getInputs().get(0).getValue()));
 +            project.getInputs().get(0).setValue(assignCollectionToSequence);
 +            project.getVariables().set(0, seqVar);
 +            resVar = seqVar;
 +            DatasetDataSource targetDatasource = validateDatasetInfo(metadataProvider, stmt.getDataverseName(),
 +                    stmt.getDatasetName());
 +            List<Integer> keySourceIndicator = ((InternalDatasetDetails) targetDatasource.getDataset()
 +                    .getDatasetDetails()).getKeySourceIndicator();
 +            ArrayList<LogicalVariable> vars = new ArrayList<LogicalVariable>();
 +            ArrayList<Mutable<ILogicalExpression>> exprs = new ArrayList<Mutable<ILogicalExpression>>();
 +            List<Mutable<ILogicalExpression>> varRefsForLoading = new ArrayList<Mutable<ILogicalExpression>>();
 +            List<List<String>> partitionKeys = DatasetUtils.getPartitioningKeys(targetDatasource.getDataset());
 +            int numOfPrimaryKeys = partitionKeys.size();
 +            for (int i = 0; i < numOfPrimaryKeys; i++) {
 +                if (keySourceIndicator == null || keySourceIndicator.get(i).intValue() == 0) {
 +                    // record part
 +                    PlanTranslationUtil.prepareVarAndExpression(partitionKeys.get(i), resVar, vars, exprs,
 +                            varRefsForLoading, context);
 +                } else {
 +                    // meta part
 +                    PlanTranslationUtil.prepareMetaKeyAccessExpression(partitionKeys.get(i), unnestVar, exprs, vars,
 +                            varRefsForLoading, context);
 +                }
 +            }
 +
 +            AssignOperator assign = new AssignOperator(vars, exprs);
 +            List<String> additionalFilteringField = DatasetUtils.getFilterField(targetDatasource.getDataset());
 +            List<LogicalVariable> additionalFilteringVars = null;
 +            List<Mutable<ILogicalExpression>> additionalFilteringAssignExpressions = null;
 +            List<Mutable<ILogicalExpression>> additionalFilteringExpressions = null;
 +            AssignOperator additionalFilteringAssign = null;
 +            if (additionalFilteringField != null) {
 +                additionalFilteringVars = new ArrayList<LogicalVariable>();
 +                additionalFilteringAssignExpressions = new ArrayList<Mutable<ILogicalExpression>>();
 +                additionalFilteringExpressions = new ArrayList<Mutable<ILogicalExpression>>();
 +
 +                PlanTranslationUtil.prepareVarAndExpression(additionalFilteringField, resVar, additionalFilteringVars,
 +                        additionalFilteringAssignExpressions, additionalFilteringExpressions, context);
 +
 +                additionalFilteringAssign = new AssignOperator(additionalFilteringVars,
 +                        additionalFilteringAssignExpressions);
 +                additionalFilteringAssign.getInputs().add(new MutableObject<ILogicalOperator>(project));
 +                assign.getInputs().add(new MutableObject<ILogicalOperator>(additionalFilteringAssign));
 +            } else {
 +                assign.getInputs().add(new MutableObject<ILogicalOperator>(project));
 +            }
 +
 +            Mutable<ILogicalExpression> varRef = new MutableObject<ILogicalExpression>(
 +                    new VariableReferenceExpression(resVar));
 +            ILogicalOperator leafOperator = null;
 +
 +            switch (stmt.getKind()) {
 +                case INSERT: {
 +                    if (targetDatasource.getDataset().hasMetaPart()) {
 +                        throw new AlgebricksException(targetDatasource.getDataset().getDatasetName()
 +                                + ": insert into dataset is not supported on Datasets with Meta records");
 +                    }
 +                    InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
 +                            varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
 +                    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
 +                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
 +                    leafOperator = new SinkOperator();
 +                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
 +                    break;
 +                }
 +                case UPSERT: {
 +                    if (targetDatasource.getDataset().hasMetaPart()) {
 +                        throw new AlgebricksException(targetDatasource.getDataset().getDatasetName()
 +                                + ": upsert into dataset is not supported on Datasets with Meta records");
 +                    }
 +                    InsertDeleteUpsertOperator upsertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
 +                            varRefsForLoading, InsertDeleteUpsertOperator.Kind.UPSERT, false);
 +                    upsertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
 +                    upsertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
 +                    // Create and add a new variable used for representing the original record
 +                    ARecordType recordType = (ARecordType) targetDatasource.getItemType();
 +                    upsertOp.setPrevRecordVar(context.newVar());
 +                    upsertOp.setPrevRecordType(recordType);
 +                    if (additionalFilteringField != null) {
 +                        upsertOp.setPrevFilterVar(context.newVar());
 +                        upsertOp.setPrevFilterType(recordType.getFieldType(additionalFilteringField.get(0)));
 +                    }
 +                    leafOperator = new SinkOperator();
 +                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(upsertOp));
 +                    break;
 +                }
 +                case DELETE: {
 +                    if (targetDatasource.getDataset().hasMetaPart()) {
 +                        throw new AlgebricksException(targetDatasource.getDataset().getDatasetName()
 +                                + ": delete from dataset is not supported on Datasets with Meta records");
 +                    }
 +                    InsertDeleteUpsertOperator deleteOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
 +                            varRefsForLoading, InsertDeleteUpsertOperator.Kind.DELETE, false);
 +                    deleteOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
 +                    deleteOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
 +                    leafOperator = new SinkOperator();
 +                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(deleteOp));
 +                    break;
 +                }
 +                case CONNECT_FEED: {
 +                    InsertDeleteUpsertOperator insertOp = new InsertDeleteUpsertOperator(targetDatasource, varRef,
 +                            varRefsForLoading, InsertDeleteUpsertOperator.Kind.INSERT, false);
 +                    insertOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
 +                    insertOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
 +                    leafOperator = new SinkOperator();
 +                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(insertOp));
 +                    break;
 +                }
 +                case SUBSCRIBE_FEED: {
 +                    // if the feed is a change feed (i.e, performs different operations), we need to project op variable
 +                    CompiledSubscribeFeedStatement sfs = (CompiledSubscribeFeedStatement) stmt;
 +                    InsertDeleteUpsertOperator feedModificationOp;
 +                    AssignOperator metaAndKeysAssign = null;
 +                    List<LogicalVariable> metaAndKeysVars = null;
 +                    List<Mutable<ILogicalExpression>> metaAndKeysExprs = null;
 +                    List<Mutable<ILogicalExpression>> metaExpSingletonList = null;
 +                    boolean isChangeFeed = FeedMetadataUtil.isChangeFeed(metadataProvider, sfs.getDataverseName(),
 +                            sfs.getFeedName());
 +                    if (targetDatasource.getDataset().hasMetaPart() || isChangeFeed) {
 +                        metaAndKeysVars = new ArrayList<>();
 +                        metaAndKeysExprs = new ArrayList<>();
 +                    }
 +                    if (targetDatasource.getDataset().hasMetaPart()) {
 +                        // add the meta function
 +                        IFunctionInfo finfoMeta = FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.META);
 +                        ScalarFunctionCallExpression metaFunction = new ScalarFunctionCallExpression(finfoMeta,
 +                                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(unnestVar)));
 +                        // create assign for the meta part
 +                        LogicalVariable metaVar = context.newVar();
 +                        metaExpSingletonList = new ArrayList<>(1);
 +                        metaExpSingletonList
 +                                .add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(metaVar)));
 +                        metaAndKeysVars.add(metaVar);
 +                        metaAndKeysExprs.add(new MutableObject<ILogicalExpression>(metaFunction));
 +                        project.getVariables().add(metaVar);
 +                    }
 +                    if (isChangeFeed) {
 +                        varRefsForLoading.clear();
 +                        for (Mutable<ILogicalExpression> assignExpr : exprs) {
 +                            if (assignExpr.getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
 +                                AbstractFunctionCallExpression funcCall = (AbstractFunctionCallExpression) assignExpr
 +                                        .getValue();
 +                                funcCall.substituteVar(resVar, unnestVar);
 +                                LogicalVariable pkVar = context.newVar();
 +                                metaAndKeysVars.add(pkVar);
 +                                metaAndKeysExprs.add(new MutableObject<ILogicalExpression>(assignExpr.getValue()));
 +                                project.getVariables().add(pkVar);
 +                                varRefsForLoading.add(
 +                                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(pkVar)));
 +                            }
 +                        }
 +                        // A change feed, we don't need the assign to access PKs
 +                        feedModificationOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
 +                                metaExpSingletonList, InsertDeleteUpsertOperator.Kind.UPSERT, false);
 +                        // Create and add a new variable used for representing the original record
 +                        feedModificationOp.setPrevRecordVar(context.newVar());
 +                        feedModificationOp.setPrevRecordType(targetDatasource.getItemType());
 +                        if (additionalFilteringField != null) {
 +                            feedModificationOp.setPrevFilterVar(context.newVar());
 +                            feedModificationOp.setPrevFilterType(((ARecordType) targetDatasource.getItemType())
 +                                    .getFieldType(additionalFilteringField.get(0)));
 +                            additionalFilteringAssign.getInputs().clear();
 +                            additionalFilteringAssign.getInputs().add(assign.getInputs().get(0));
 +                            feedModificationOp.getInputs().add(new MutableObject<>(additionalFilteringAssign));
 +                        } else {
 +                            feedModificationOp.getInputs().add(assign.getInputs().get(0));
 +                        }
 +                    } else {
 +                        feedModificationOp = new InsertDeleteUpsertOperator(targetDatasource, varRef, varRefsForLoading,
 +                                metaExpSingletonList, InsertDeleteUpsertOperator.Kind.INSERT, false);
 +                        feedModificationOp.getInputs().add(new MutableObject<ILogicalOperator>(assign));
 +                    }
 +                    if (targetDatasource.getDataset().hasMetaPart() || isChangeFeed) {
 +                        metaAndKeysAssign = new AssignOperator(metaAndKeysVars, metaAndKeysExprs);
 +                        metaAndKeysAssign.getInputs().add(project.getInputs().get(0));
 +                        project.getInputs().set(0, new MutableObject<ILogicalOperator>(metaAndKeysAssign));
 +                    }
 +                    feedModificationOp.setAdditionalFilteringExpressions(additionalFilteringExpressions);
 +                    leafOperator = new SinkOperator();
 +                    leafOperator.getInputs().add(new MutableObject<ILogicalOperator>(feedModificationOp));
 +                    break;
 +                }
 +                default:
 +                    break;
 +            }
 +            topOp = leafOperator;
 +        }
 +        globalPlanRoots.add(new MutableObject<ILogicalOperator>(topOp));
 +        ILogicalPlan plan = new ALogicalPlanImpl(globalPlanRoots);
 +        eliminateSharedOperatorReferenceForPlan(plan);
 +        return plan;
 +    }
 +
 +    /**
 +     * Looks up the named dataset and wraps it in a {@link DatasetDataSource}
 +     * suitable for use as a write target.
 +     *
 +     * @throws AlgebricksException if the dataset does not exist in the given
 +     *             dataverse, or if it is EXTERNAL (external datasets are read-only
 +     *             from the query compiler's point of view)
 +     */
 +    private DatasetDataSource validateDatasetInfo(AqlMetadataProvider metadataProvider, String dataverseName,
 +            String datasetName) throws AlgebricksException {
 +        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
 +        if (dataset == null) {
 +            throw new AlgebricksException("Cannot find dataset " + datasetName + " in dataverse " + dataverseName);
 +        }
 +        if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
 +            throw new AlgebricksException("Cannot write output to an external dataset.");
 +        }
 +        AqlSourceId sourceId = new AqlSourceId(dataverseName, datasetName);
 +        // Resolve the record type and (possibly null) meta record type from metadata.
 +        IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
 +        IAType metaItemType = metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
 +                dataset.getMetaItemTypeName());
 +        INodeDomain domain = metadataProvider.findNodeDomain(dataset.getNodeGroupName());
 +        DatasetDataSource dataSource = new DatasetDataSource(sourceId, dataset, itemType, metaItemType,
 +                AqlDataSourceType.INTERNAL_DATASET, dataset.getDatasetDetails(), domain);
 +        return dataSource;
 +    }
 +
 +    /**
 +     * Builds a default output file split under the JVM temporary directory,
 +     * located on the metadata node. Each call yields a distinct file name via
 +     * the {@code outputFileID} counter.
 +     *
 +     * @throws MetadataException if the metadata properties cannot be obtained
 +     */
 +    private FileSplit getDefaultOutputFileLocation() throws MetadataException {
 +        // The standard JVM property is "java.io.tmpdir" (all lowercase);
 +        // "java.io.tmpDir" is undefined and would make the prefix the string "null".
 +        String outputDir = System.getProperty("java.io.tmpdir");
 +        String filePath = outputDir + System.getProperty("file.separator") + OUTPUT_FILE_PREFIX
 +                + outputFileID.incrementAndGet();
 +        AsterixMetadataProperties metadataProperties = AsterixAppContextInfo.getInstance().getMetadataProperties();
 +        return new FileSplit(metadataProperties.getMetadataNodeName(), new FileReference(new File(filePath)));
 +    }
 +
 +    /**
 +     * Translates a LET clause into an ASSIGN operator binding the let variable
 +     * to its binding expression.
 +     */
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(LetClause lc, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        LogicalVariable v;
 +        ILogicalOperator returnedOp;
 +
 +        switch (lc.getBindingExpr().getKind()) {
 +            case VARIABLE_EXPRESSION: {
 +                // Binding to an already-translated variable: alias it with a
 +                // plain variable-reference assignment.
 +                v = context.newVar(lc.getVarExpr());
 +                LogicalVariable prev = context.getVar(((VariableExpr) lc.getBindingExpr()).getVar().getId());
 +                returnedOp = new AssignOperator(v,
 +                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(prev)));
 +                returnedOp.getInputs().add(tupSource);
 +                break;
 +            }
 +            default: {
 +                // General case: translate the binding expression first, then
 +                // assign its value to the new variable.
 +                v = context.newVar(lc.getVarExpr());
 +                Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(lc.getBindingExpr(),
 +                        tupSource);
 +                returnedOp = new AssignOperator(v, new MutableObject<ILogicalExpression>(eo.first));
 +                returnedOp.getInputs().add(eo.second);
 +                break;
 +            }
 +        }
 +        return new Pair<ILogicalOperator, LogicalVariable>(returnedOp, v);
 +    }
 +
 +    /**
 +     * Translates a field accessor (e.g. {@code $r.name}) into an ASSIGN of a
 +     * field-access-by-name function call over the translated target expression.
 +     */
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(FieldAccessor fa, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = langExprToAlgExpression(fa.getExpr(), tupSource);
 +        LogicalVariable v = context.newVar();
 +        AbstractFunctionCallExpression fldAccess = new ScalarFunctionCallExpression(
 +                FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.FIELD_ACCESS_BY_NAME));
 +        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
 +        // The field name becomes a string constant argument of the access call.
 +        ILogicalExpression faExpr = new ConstantExpression(
 +                new AsterixConstantValue(new AString(fa.getIdent().getValue())));
 +        fldAccess.getArguments().add(new MutableObject<ILogicalExpression>(faExpr));
 +        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(fldAccess));
 +        a.getInputs().add(p.second);
 +        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
 +    }
 +
 +    /**
 +     * Translates an index accessor. {@code $l[?]} ("any") maps to the
 +     * any-collection-member function; {@code $l[i]} maps to get-item with the
 +     * translated index expression as the second argument.
 +     */
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(IndexAccessor ia, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = langExprToAlgExpression(ia.getExpr(), tupSource);
 +        LogicalVariable v = context.newVar();
 +        AbstractFunctionCallExpression f;
 +        if (ia.isAny()) {
 +            f = new ScalarFunctionCallExpression(
 +                    FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.ANY_COLLECTION_MEMBER));
 +            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
 +        } else {
 +            Pair<ILogicalExpression, Mutable<ILogicalOperator>> indexPair = langExprToAlgExpression(ia.getIndexExpr(),
 +                    tupSource);
 +            f = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.GET_ITEM));
 +            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
 +            f.getArguments().add(new MutableObject<ILogicalExpression>(indexPair.first));
 +        }
 +        AssignOperator a = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
 +        a.getInputs().add(p.second);
 +        return new Pair<ILogicalOperator, LogicalVariable>(a, v);
 +    }
 +
 +    /**
 +     * Translates a function call: arguments are converted to algebraic
 +     * expressions (chaining any operators they produce below {@code topOp}),
 +     * the function is resolved (user-defined first, then builtin), and the
 +     * call is bound to a fresh variable via an ASSIGN.
 +     *
 +     * @throws AsterixException if the function signature cannot be resolved
 +     */
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(CallExpr fcall, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        LogicalVariable v = context.newVar();
 +        FunctionSignature signature = fcall.getFunctionSignature();
 +        List<Mutable<ILogicalExpression>> args = new ArrayList<Mutable<ILogicalExpression>>();
 +        Mutable<ILogicalOperator> topOp = tupSource;
 +
 +        for (Expression expr : fcall.getExprList()) {
 +            switch (expr.getKind()) {
 +                case VARIABLE_EXPRESSION: {
 +                    // Variables and literals translate to pure expressions and
 +                    // introduce no new operators.
 +                    LogicalVariable var = context.getVar(((VariableExpr) expr).getVar().getId());
 +                    args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
 +                    break;
 +                }
 +                case LITERAL_EXPRESSION: {
 +                    LiteralExpr val = (LiteralExpr) expr;
 +                    args.add(new MutableObject<ILogicalExpression>(new ConstantExpression(
 +                            new AsterixConstantValue(ConstantHelper.objectFromLiteral(val.getValue())))));
 +                    break;
 +                }
 +                default: {
 +                    Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(expr, topOp);
 +                    AbstractLogicalOperator o1 = (AbstractLogicalOperator) eo.second.getValue();
 +                    args.add(new MutableObject<ILogicalExpression>(eo.first));
 +                    // Only advance topOp when the argument really produced a new
 +                    // operator chain (not a trivial ASSIGN directly over topOp).
 +                    if (o1 != null && !(o1.getOperatorTag() == LogicalOperatorTag.ASSIGN && hasOnlyChild(o1, topOp))) {
 +                        topOp = eo.second;
 +                    }
 +                    break;
 +                }
 +            }
 +        }
 +
 +        // Resolution order: user-defined function first, builtin as fallback.
 +        AbstractFunctionCallExpression f;
 +        if ((f = lookupUserDefinedFunction(signature, args)) == null) {
 +            f = lookupBuiltinFunction(signature.getName(), signature.getArity(), args);
 +        }
 +
 +        if (f == null) {
 +            throw new AsterixException(" Unknown function " + signature.getName() + "@" + signature.getArity());
 +        }
 +
 +        // Put hints into function call expr.
 +        if (fcall.hasHints()) {
 +            for (IExpressionAnnotation hint : fcall.getHints()) {
 +                f.getAnnotations().put(hint, hint);
 +            }
 +        }
 +
 +        AssignOperator op = new AssignOperator(v, new MutableObject<ILogicalExpression>(f));
 +        if (topOp != null) {
 +            op.getInputs().add(topOp);
 +        }
 +
 +        return new Pair<ILogicalOperator, LogicalVariable>(op, v);
 +    }
 +
 +    /**
 +     * Resolves a user-defined function from the metadata catalog.
 +     *
 +     * @return a scalar call expression over {@code args}, or {@code null} when
 +     *         the signature has no namespace or no matching function exists
 +     * @throws MetadataException if the function's implementation language is
 +     *             neither JAVA nor AQL
 +     */
 +    private AbstractFunctionCallExpression lookupUserDefinedFunction(FunctionSignature signature,
 +            List<Mutable<ILogicalExpression>> args) throws MetadataException {
 +        // Builtins have no namespace; only namespaced signatures can be UDFs.
 +        if (signature.getNamespace() == null) {
 +            return null;
 +        }
 +        Function function = MetadataManager.INSTANCE.getFunction(metadataProvider.getMetadataTxnContext(), signature);
 +        if (function == null) {
 +            return null;
 +        }
 +        AbstractFunctionCallExpression f = null;
 +        if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_JAVA)) {
 +            // External (Java) UDF: function info comes from the external-function compiler.
 +            IFunctionInfo finfo = ExternalFunctionCompilerUtil
 +                    .getExternalFunctionInfo(metadataProvider.getMetadataTxnContext(), function);
 +            f = new ScalarFunctionCallExpression(finfo, args);
 +        } else if (function.getLanguage().equalsIgnoreCase(Function.LANGUAGE_AQL)) {
 +            IFunctionInfo finfo = FunctionUtil.getFunctionInfo(signature);
 +            f = new ScalarFunctionCallExpression(finfo, args);
 +        } else {
 +            throw new MetadataException(
 +                    " User defined functions written in " + function.getLanguage() + " are not supported");
 +        }
 +        return f;
 +    }
 +
 +    /**
 +     * Resolves a builtin function by name and arity, trying the Algebricks
 +     * namespace first and the Asterix namespace second, and wraps it in the
 +     * appropriate call-expression kind (aggregate, unnesting, or scalar).
 +     *
 +     * @return the call expression, or {@code null} if no builtin matches
 +     */
 +    private AbstractFunctionCallExpression lookupBuiltinFunction(String functionName, int arity,
 +            List<Mutable<ILogicalExpression>> args) {
 +        AbstractFunctionCallExpression f = null;
 +        FunctionIdentifier fi = new FunctionIdentifier(AlgebricksBuiltinFunctions.ALGEBRICKS_NS, functionName, arity);
 +        AsterixFunctionInfo afi = AsterixBuiltinFunctions.lookupFunction(fi);
 +        FunctionIdentifier builtinAquafi = afi == null ? null : afi.getFunctionIdentifier();
 +
 +        if (builtinAquafi != null) {
 +            fi = builtinAquafi;
 +        } else {
 +            // Not an Algebricks builtin; retry in the Asterix namespace.
 +            fi = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, functionName, arity);
 +            afi = AsterixBuiltinFunctions.lookupFunction(fi);
 +            if (afi == null) {
 +                return null;
 +            }
 +        }
 +        if (AsterixBuiltinFunctions.isBuiltinAggregateFunction(fi)) {
 +            f = AsterixBuiltinFunctions.makeAggregateFunctionExpression(fi, args);
 +        } else if (AsterixBuiltinFunctions.isBuiltinUnnestingFunction(fi)) {
 +            UnnestingFunctionCallExpression ufce = new UnnestingFunctionCallExpression(FunctionUtil.getFunctionInfo(fi),
 +                    args);
 +            ufce.setReturnsUniqueValues(AsterixBuiltinFunctions.returnsUniqueValues(fi));
 +            f = ufce;
 +        } else {
 +            f = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(fi), args);
 +        }
 +        return f;
 +    }
 +
 +    /**
 +     * Function declarations never reach plan translation; they are inlined
 +     * during AST rewriting, so encountering one here is a translator bug.
 +     */
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(FunctionDecl fd, Mutable<ILogicalOperator> tupSource) {
 +        throw new IllegalStateException("Function declarations should be inlined at AST rewriting phase.");
 +    }
 +
 +    /**
 +     * Translates a GROUP BY clause. If a group variable is declared, it is
 +     * first materialized as an open record of the grouping fields. A
 +     * "group all" clause (no grouping keys) becomes a plain AGGREGATE over the
 +     * whole input; otherwise a GROUP BY operator is built with gby/decor
 +     * expressions and one nested LISTIFY plan per "with" variable.
 +     */
 +    @SuppressWarnings("unchecked")
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(GroupbyClause gc, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
-         GroupByOperator gOp = new GroupByOperator();
 +        Mutable<ILogicalOperator> topOp = tupSource;
-         for (GbyVariableExpressionPair ve : gc.getGbyPairList()) {
-             LogicalVariable v;
-             VariableExpr vexpr = ve.getVar();
-             if (vexpr != null) {
-                 v = context.newVar(vexpr);
-             } else {
-                 v = context.newVar();
-             }
-             Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(ve.getExpr(), topOp);
-             gOp.addGbyExpression(v, eo.first);
-             topOp = eo.second;
-         }
-         for (GbyVariableExpressionPair ve : gc.getDecorPairList()) {
-             LogicalVariable v;
-             VariableExpr vexpr = ve.getVar();
-             if (vexpr != null) {
-                 v = context.newVar(vexpr);
-             } else {
-                 v = context.newVar();
-             }
-             Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(ve.getExpr(), topOp);
-             gOp.addDecorExpression(v, eo.first);
-             topOp = eo.second;
-         }
- 
 +        // Bind the declared group variable to an open record built from
 +        // (field-name, field-value) pairs of the grouping fields.
 +        if (gc.hasGroupVar()) {
 +            List<Pair<Expression, Identifier>> groupFieldList = gc.getGroupFieldList();
 +            List<Mutable<ILogicalExpression>> groupRecordConstructorArgList = new ArrayList<>();
 +            for (Pair<Expression, Identifier> groupField : groupFieldList) {
 +                ILogicalExpression groupFieldNameExpr = langExprToAlgExpression(
 +                        new LiteralExpr(new StringLiteral(groupField.second.getValue())), topOp).first;
 +                groupRecordConstructorArgList.add(new MutableObject<ILogicalExpression>(groupFieldNameExpr));
 +                ILogicalExpression groupFieldExpr = langExprToAlgExpression(groupField.first, topOp).first;
 +                groupRecordConstructorArgList.add(new MutableObject<ILogicalExpression>(groupFieldExpr));
 +            }
 +            LogicalVariable groupVar = context.newVar(gc.getGroupVar());
 +            AssignOperator groupVarAssignOp = new AssignOperator(groupVar,
 +                    new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
 +                            FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR),
 +                            groupRecordConstructorArgList)));
 +            groupVarAssignOp.getInputs().add(topOp);
 +            topOp = new MutableObject<ILogicalOperator>(groupVarAssignOp);
 +        }
++        // "GROUP ALL": no grouping keys, so listify each "with" variable over
++        // the entire input with a single AGGREGATE operator.
++        if (gc.isGroupAll()) {
++            List<LogicalVariable> aggVars = new ArrayList<>();
++            List<Mutable<ILogicalExpression>> aggFuncs = new ArrayList<>();
++            for (VariableExpr var : gc.getWithVarList()) {
++                LogicalVariable aggVar = context.newVar();
++                LogicalVariable oldVar = context.getVar(var);
++                List<Mutable<ILogicalExpression>> flArgs = new ArrayList<Mutable<ILogicalExpression>>();
++                flArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldVar)));
++                AggregateFunctionCallExpression fListify = AsterixBuiltinFunctions
++                        .makeAggregateFunctionExpression(AsterixBuiltinFunctions.LISTIFY, flArgs);
++                aggVars.add(aggVar);
++                aggFuncs.add(new MutableObject<ILogicalExpression>(fListify));
++                // Hide the variable that was part of the "with", replacing it with
++                // the one bound by the aggregation op.
++                context.setVar(var, aggVar);
++            }
++            AggregateOperator aggOp = new AggregateOperator(aggVars, aggFuncs);
++            aggOp.getInputs().add(topOp);
++            return new Pair<ILogicalOperator, LogicalVariable>(aggOp, null);
++        } else {
++            GroupByOperator gOp = new GroupByOperator();
++            for (GbyVariableExpressionPair ve : gc.getGbyPairList()) {
++                LogicalVariable v;
++                VariableExpr vexpr = ve.getVar();
++                if (vexpr != null) {
++                    v = context.newVar(vexpr);
++                } else {
++                    v = context.newVar();
++                }
++                Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(ve.getExpr(), topOp);
++                gOp.addGbyExpression(v, eo.first);
++                topOp = eo.second;
++            }
++            for (GbyVariableExpressionPair ve : gc.getDecorPairList()) {
++                LogicalVariable v;
++                VariableExpr vexpr = ve.getVar();
++                if (vexpr != null) {
++                    v = context.newVar(vexpr);
++                } else {
++                    v = context.newVar();
++                }
++                Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(ve.getExpr(), topOp);
++                gOp.addDecorExpression(v, eo.first);
++                topOp = eo.second;
++            }
 +
-         gOp.getInputs().add(topOp);
-         for (VariableExpr var : gc.getWithVarList()) {
-             LogicalVariable aggVar = context.newVar();
-             LogicalVariable oldVar = context.getVar(var);
-             List<Mutable<ILogicalExpression>> flArgs = new ArrayList<Mutable<ILogicalExpression>>(1);
-             flArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldVar)));
-             AggregateFunctionCallExpression fListify = AsterixBuiltinFunctions
-                     .makeAggregateFunctionExpression(AsterixBuiltinFunctions.LISTIFY, flArgs);
-             AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(aggVar),
-                     (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fListify)));
- 
-             agg.getInputs().add(new MutableObject<ILogicalOperator>(
-                     new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(gOp))));
-             ILogicalPlan plan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(agg)));
-             gOp.getNestedPlans().add(plan);
-             // Hide the variable that was part of the "with", replacing it with
-             // the one bound by the aggregation op.
-             context.setVar(var, aggVar);
++            gOp.getInputs().add(topOp);
++            // One nested LISTIFY plan per "with" variable, rooted at a nested
++            // tuple source over the group-by.
++            for (VariableExpr var : gc.getWithVarList()) {
++                LogicalVariable aggVar = context.newVar();
++                LogicalVariable oldVar = context.getVar(var);
++                List<Mutable<ILogicalExpression>> flArgs = new ArrayList<Mutable<ILogicalExpression>>(1);
++                flArgs.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldVar)));
++                AggregateFunctionCallExpression fListify = AsterixBuiltinFunctions
++                        .makeAggregateFunctionExpression(AsterixBuiltinFunctions.LISTIFY, flArgs);
++                AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(aggVar),
++                        (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fListify)));
++
++                agg.getInputs().add(new MutableObject<ILogicalOperator>(
++                        new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(gOp))));
++                ILogicalPlan plan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(agg));
++                gOp.getNestedPlans().add(plan);
++                // Hide the variable that was part of the "with", replacing it with
++                // the one bound by the aggregation op.
++                context.setVar(var, aggVar);
++            }
++            gOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, gc.hasHashGroupByHint());
++            return new Pair<ILogicalOperator, LogicalVariable>(gOp, null);
 +        }
 +
-         gOp.getAnnotations().put(OperatorAnnotations.USE_HASH_GROUP_BY, gc.hasHashGroupByHint());
-         return new Pair<ILogicalOperator, LogicalVariable>(gOp, null);
 +    }
 +
 +    /**
 +     * Translates an if-then-else expression into two conditional subplans plus
 +     * a switch-case selection, as detailed in the comments below.
 +     */
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(IfExpr ifexpr, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        // In the most general case, IfThenElse is translated in the following
 +        // way.
 +        //
 +        // We assign the result of the condition to one variable varCond.
 +        // We create one subplan which contains the plan for the "then" branch,
 +        // on top of which there is a selection whose condition is varCond.
 +        // Similarly, we create one subplan for the "else" branch, in which the
 +        // selection is not(varCond).
 +        // Finally, we select the desired result.
 +        Pair<ILogicalOperator, LogicalVariable> pCond = ifexpr.getCondExpr().accept(this, tupSource);
 +        LogicalVariable varCond = pCond.second;
 +
 +        //Creates a subplan for the "then" branch.
 +        Pair<ILogicalOperator, LogicalVariable> opAndVarForThen = constructSubplanOperatorForBranch(pCond.first,
 +                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(varCond)), ifexpr.getThenExpr())
 +
 +        // Creates a subplan for the "else" branch.
 +        AbstractFunctionCallExpression notVarCond = new ScalarFunctionCallExpression(
 +                FunctionUtil.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), Collections.singletonList(
 +                        new MutableObject<ILogicalExpression>(new VariableReferenceExpression(varCond))));
 +        Pair<ILogicalOperator, LogicalVariable> opAndVarForElse = constructSubplanOperatorForBranch(
 +                opAndVarForThen.first, new MutableObject<ILogicalExpression>(notVarCond), ifexpr.getElseExpr());
 +
 +        // Uses switch-case function to select the results of two branches.
 +        LogicalVariable selectVar = context.newVar();
 +        List<Mutable<ILogicalExpression>> arguments = new ArrayList<>();
 +        arguments.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(varCond)));
 +        arguments.add(new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
 +        arguments.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(opAndVarForThen.second)));
 +        arguments.add(new MutableObject<ILogicalExpression>(ConstantExpression.FALSE));
 +        arguments.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(opAndVarForElse.second)));
 +        AbstractFunctionCallExpression swithCaseExpr = new ScalarFunctionCallExpression(
 +                FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.SWITCH_CASE), arguments);
 +        AssignOperator assignOp = new AssignOperator(selectVar, new MutableObject<ILogicalExpression>(swithCaseExpr));
 +        assignOp.getInputs().add(new MutableObject<ILogicalOperator>(opAndVarForElse.first));
 +
 +        // Unnests the selected ("if" or "else") result.
 +        LogicalVariable unnestVar = context.newVar();
 +        UnnestOperator unnestOp = new UnnestOperator(unnestVar,
 +                new MutableObject<ILogicalExpression>(new UnnestingFunctionCallExpression(
 +                        FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION),
 +                        Collections.singletonList(
 +                                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(selectVar))))));
 +        unnestOp.getInputs().add(new MutableObject<ILogicalOperator>(assignOp));
 +
 +        // Produces the final result.
 +        LogicalVariable resultVar = context.newVar();
 +        AssignOperator finalAssignOp = new AssignOperator(resultVar,
 +                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(unnestVar)));
 +        finalAssignOp.getInputs().add(new MutableObject<ILogicalOperator>(unnestOp));
 +        return new Pair<ILogicalOperator, LogicalVariable>(finalAssignOp, resultVar);
 +    }
 +
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(LiteralExpr l, Mutable<ILogicalOperator> tupSource) {
 +        LogicalVariable var = context.newVar();
 +        AssignOperator a = new AssignOperator(var, new MutableObject<ILogicalExpression>(
 +                new ConstantExpression(new AsterixConstantValue(ConstantHelper.objectFromLiteral(l.getValue())))));
 +        if (tupSource != null) {
 +            a.getInputs().add(tupSource);
 +        }
 +        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
 +    }
 +
    /**
     * Translates a (non and/or) operator expression such as {@code a < b + c}.
     * Operators are folded left-to-right: the expression built so far becomes
     * the first argument of the next operator's function call. Broadcast hints
     * on the first/second operand are attached as annotations on the enclosing
     * comparison call.
     *
     * @param op the operator expression (nOps operators over nOps + 1 operands)
     * @param tupSource the plan producing the input tuples
     * @return an assign operator computing the chained expression and its variable
     * @throws AsterixException if an operand fails to translate
     */
    @Override
    public Pair<ILogicalOperator, LogicalVariable> visit(OperatorExpr op, Mutable<ILogicalOperator> tupSource)
            throws AsterixException {
        List<OperatorType> ops = op.getOpList();
        int nOps = ops.size();

        // AND/OR chains are n-ary and handled separately.
        if (nOps > 0 && (ops.get(0) == OperatorType.AND || ops.get(0) == OperatorType.OR)) {
            return visitAndOrOperator(op, tupSource);
        }

        List<Expression> exprs = op.getExprList();

        Mutable<ILogicalOperator> topOp = tupSource;

        ILogicalExpression currExpr = null;
        // There is one more operand than operators, hence i <= nOps.
        for (int i = 0; i <= nOps; i++) {

            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = langExprToAlgExpression(exprs.get(i), topOp);
            topOp = p.second;
            ILogicalExpression e = p.first;
            // now look at the operator
            if (i < nOps) {
                if (OperatorExpr.opIsComparison(ops.get(i))) {
                    AbstractFunctionCallExpression c = createComparisonExpression(ops.get(i));

                    // chain the operators
                    if (i == 0) {
                        c.getArguments().add(new MutableObject<ILogicalExpression>(e));
                        currExpr = c;
                        if (op.isBroadcastOperand(i)) {
                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
                            bcast.setObject(BroadcastSide.LEFT);
                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
                        }
                    } else {
                        // Close the previous operator with operand i, then nest it
                        // as the left argument of the new comparison.
                        ((AbstractFunctionCallExpression) currExpr).getArguments()
                                .add(new MutableObject<ILogicalExpression>(e));
                        c.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
                        currExpr = c;
                        if (i == 1 && op.isBroadcastOperand(i)) {
                            BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
                            bcast.setObject(BroadcastSide.RIGHT);
                            c.getAnnotations().put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
                        }
                    }
                } else {
                    AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(ops.get(i));

                    if (i == 0) {
                        f.getArguments().add(new MutableObject<ILogicalExpression>(e));
                        currExpr = f;
                    } else {
                        ((AbstractFunctionCallExpression) currExpr).getArguments()
                                .add(new MutableObject<ILogicalExpression>(e));
                        f.getArguments().add(new MutableObject<ILogicalExpression>(currExpr));
                        currExpr = f;
                    }
                }
            } else { // don't forget the last expression...
                // NOTE(review): assumes nOps >= 1; with an empty operator list
                // currExpr would be null here — presumably guaranteed by the parser.
                ((AbstractFunctionCallExpression) currExpr).getArguments()
                        .add(new MutableObject<ILogicalExpression>(e));
                if (i == 1 && op.isBroadcastOperand(i)) {
                    BroadcastExpressionAnnotation bcast = new BroadcastExpressionAnnotation();
                    bcast.setObject(BroadcastSide.RIGHT);
                    ((AbstractFunctionCallExpression) currExpr).getAnnotations()
                            .put(BroadcastExpressionAnnotation.BROADCAST_ANNOTATION_KEY, bcast);
                }
            }
        }

        // Add hints as annotations.
        if (op.hasHints() && (currExpr instanceof AbstractFunctionCallExpression)) {
            AbstractFunctionCallExpression currFuncExpr = (AbstractFunctionCallExpression) currExpr;
            for (IExpressionAnnotation hint : op.getHints()) {
                currFuncExpr.getAnnotations().put(hint, hint);
            }
        }

        LogicalVariable assignedVar = context.newVar();
        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(currExpr));

        a.getInputs().add(topOp);

        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);
    }
 +
    /**
     * Translates an order-by clause into an {@link OrderOperator}, pairing each
     * order expression with its ASC/DESC modifier. Optional cardinality,
     * frame-limit, and range-map hints are attached as operator annotations.
     *
     * @param oc the order-by clause
     * @param tupSource the plan producing the input tuples
     * @return the order operator; no new variable is produced (second is null)
     * @throws AsterixException if an order expression fails to translate
     */
    @Override
    public Pair<ILogicalOperator, LogicalVariable> visit(OrderbyClause oc, Mutable<ILogicalOperator> tupSource)
            throws AsterixException {
        OrderOperator ord = new OrderOperator();
        Iterator<OrderModifier> modifIter = oc.getModifierList().iterator();
        Mutable<ILogicalOperator> topOp = tupSource;
        for (Expression e : oc.getOrderbyList()) {
            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = langExprToAlgExpression(e, topOp);
            OrderModifier m = modifIter.next();
            OrderOperator.IOrder comp = (m == OrderModifier.ASC) ? OrderOperator.ASC_ORDER : OrderOperator.DESC_ORDER;
            ord.getOrderExpressions().add(new Pair<IOrder, Mutable<ILogicalExpression>>(comp,
                    new MutableObject<ILogicalExpression>(p.first)));
            topOp = p.second;
        }
        ord.getInputs().add(topOp);
        if (oc.getNumTuples() > 0) {
            ord.getAnnotations().put(OperatorAnnotations.CARDINALITY, oc.getNumTuples());
        }
        if (oc.getNumFrames() > 0) {
            ord.getAnnotations().put(OperatorAnnotations.MAX_NUMBER_FRAMES, oc.getNumFrames());
        }
        if (oc.getRangeMap() != null) {
            // The range map's order must agree with the first sort key's direction.
            Iterator<OrderModifier> orderModifIter = oc.getModifierList().iterator();
            boolean ascending = (orderModifIter.next() == OrderModifier.ASC);
            RangeMapBuilder.verifyRangeOrder(oc.getRangeMap(), ascending);
            ord.getAnnotations().put(OperatorAnnotations.USE_RANGE_CONNECTOR, oc.getRangeMap());
        }
        return new Pair<ILogicalOperator, LogicalVariable>(ord, null);
    }
 +
    /**
     * Translates a quantified expression (SOME/EVERY ... SATISFIES ...).
     * Each quantified variable becomes an unnest; the satisfies-expression
     * becomes a select (negated for EVERY); an aggregate then tests whether the
     * surviving stream is non-empty (SOME) or empty (EVERY).
     *
     * @param qe the quantified expression
     * @param tupSource the plan producing the input tuples
     * @return the aggregate operator and the boolean result variable
     * @throws AsterixException if a sub-expression fails to translate
     */
    @Override
    public Pair<ILogicalOperator, LogicalVariable> visit(QuantifiedExpression qe, Mutable<ILogicalOperator> tupSource)
            throws AsterixException {
        Mutable<ILogicalOperator> topOp = tupSource;

        ILogicalOperator firstOp = null;
        Mutable<ILogicalOperator> lastOp = null;

        for (QuantifiedPair qt : qe.getQuantifiedList()) {
            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = langExprToAlgExpression(qt.getExpr(), topOp);
            topOp = eo1.second;
            LogicalVariable uVar = context.newVar(qt.getVarExpr());
            ILogicalOperator u = new UnnestOperator(uVar,
                    new MutableObject<ILogicalExpression>(makeUnnestExpression(eo1.first)));

            if (firstOp == null) {
                firstOp = u;
            }
            if (lastOp != null) {
                u.getInputs().add(lastOp);
            }
            lastOp = new MutableObject<ILogicalOperator>(u);
        }

        // We make all the unnest correspond. to quantif. vars. sit on top
        // in the hope of enabling joins & other optimiz.
        // NOTE(review): assumes at least one quantified pair; firstOp would be
        // null otherwise — presumably guaranteed by the grammar.
        firstOp.getInputs().add(topOp);
        topOp = lastOp;

        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = langExprToAlgExpression(qe.getSatisfiesExpr(), topOp);

        AggregateFunctionCallExpression fAgg;
        SelectOperator s;
        if (qe.getQuantifier() == Quantifier.SOME) {
            // SOME: keep satisfying tuples, test that the stream is non-empty.
            s = new SelectOperator(new MutableObject<ILogicalExpression>(eo2.first), false, null);
            s.getInputs().add(eo2.second);
            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.NON_EMPTY_STREAM,
                    new ArrayList<Mutable<ILogicalExpression>>());
        } else { // EVERY
            // EVERY: keep counterexamples (not(satisfies)), test that none exist.
            List<Mutable<ILogicalExpression>> satExprList = new ArrayList<Mutable<ILogicalExpression>>(1);
            satExprList.add(new MutableObject<ILogicalExpression>(eo2.first));
            s = new SelectOperator(new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
                    FunctionUtil.getFunctionInfo(AlgebricksBuiltinFunctions.NOT), satExprList)), false, null);
            s.getInputs().add(eo2.second);
            fAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(AsterixBuiltinFunctions.EMPTY_STREAM,
                    new ArrayList<Mutable<ILogicalExpression>>());
        }
        LogicalVariable qeVar = context.newVar();
        AggregateOperator a = new AggregateOperator(mkSingletonArrayList(qeVar),
                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(fAgg)));
        a.getInputs().add(new MutableObject<ILogicalOperator>(s));
        return new Pair<ILogicalOperator, LogicalVariable>(a, qeVar);
    }
 +
    /**
     * Translates a query by delegating to its body expression; a query node
     * adds no plan structure of its own.
     */
    @Override
    public Pair<ILogicalOperator, LogicalVariable> visit(Query q, Mutable<ILogicalOperator> tupSource)
            throws AsterixException {
        return q.getBody().accept(this, tupSource);
    }
 +
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(RecordConstructor rc, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(
 +                FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR));
 +        LogicalVariable v1 = context.newVar();
 +        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
 +        Mutable<ILogicalOperator> topOp = tupSource;
 +        for (FieldBinding fb : rc.getFbList()) {
 +            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo1 = langExprToAlgExpression(fb.getLeftExpr(), topOp);
 +            f.getArguments().add(new MutableObject<ILogicalExpression>(eo1.first));
 +            topOp = eo1.second;
 +            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo2 = langExprToAlgExpression(fb.getRightExpr(), topOp);
 +            f.getArguments().add(new MutableObject<ILogicalExpression>(eo2.first));
 +            topOp = eo2.second;
 +        }
 +        a.getInputs().add(topOp);
 +        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
 +    }
 +
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(ListConstructor lc, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        FunctionIdentifier fid = (lc.getType() == Type.ORDERED_LIST_CONSTRUCTOR)
 +                ? AsterixBuiltinFunctions.ORDERED_LIST_CONSTRUCTOR : AsterixBuiltinFunctions.UNORDERED_LIST_CONSTRUCTOR;
 +        AbstractFunctionCallExpression f = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(fid));
 +        LogicalVariable v1 = context.newVar();
 +        AssignOperator a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(f));
 +        Mutable<ILogicalOperator> topOp = tupSource;
 +        for (Expression expr : lc.getExprList()) {
 +            Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(expr, topOp);
 +            f.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
 +            topOp = eo.second;
 +        }
 +        a.getInputs().add(topOp);
 +        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
 +    }
 +
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(UnaryExpr u, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        Expression expr = u.getExpr();
 +        Pair<ILogicalExpression, Mutable<ILogicalOperator>> eo = langExprToAlgExpression(expr, tupSource);
 +        LogicalVariable v1 = context.newVar();
 +        AssignOperator a;
 +        if (u.getSign() == Sign.POSITIVE) {
 +            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(eo.first));
 +        } else {
 +            AbstractFunctionCallExpression m = new ScalarFunctionCallExpression(
 +                    FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.NUMERIC_UNARY_MINUS));
 +            m.getArguments().add(new MutableObject<ILogicalExpression>(eo.first));
 +            a = new AssignOperator(v1, new MutableObject<ILogicalExpression>(m));
 +        }
 +        a.getInputs().add(eo.second);
 +        return new Pair<ILogicalOperator, LogicalVariable>(a, v1);
 +    }
 +
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(VariableExpr v, Mutable<ILogicalOperator> tupSource) {
 +        // Should we ever get to this method?
 +        LogicalVariable var = context.newVar();
 +        LogicalVariable oldV = context.getVar(v.getVar().getId());
 +        AssignOperator a = new AssignOperator(var,
 +                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(oldV)));
 +        a.getInputs().add(tupSource);
 +        return new Pair<ILogicalOperator, LogicalVariable>(a, var);
 +    }
 +
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(WhereClause w, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = langExprToAlgExpression(w.getWhereExpr(), tupSource);
 +        SelectOperator s = new SelectOperator(new MutableObject<ILogicalExpression>(p.first), false, null);
 +        s.getInputs().add(p.second);
 +        return new Pair<ILogicalOperator, LogicalVariable>(s, null);
 +    }
 +
 +    @Override
 +    public Pair<ILogicalOperator, LogicalVariable> visit(LimitClause lc, Mutable<ILogicalOperator> tupSource)
 +            throws AsterixException {
 +        Pair<ILogicalExpression, Mutable<ILogicalOperator>> p1 = langExprToAlgExpression(lc.getLimitExpr(), tupSource);
 +        LimitOperator opLim;
 +        Expression offset = lc.getOffset();
 +        if (offset != null) {
 +            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p2 = langExprToAlgExpression(offset, p1.second);
 +            opLim = new LimitOperator(p1.first, p2.first);
 +            opLim.getInputs().add(p2.second);
 +        } else {
 +            opLim = new LimitOperator(p1.first);
 +            opLim.getInputs().add(p1.second);
 +        }
 +        return new Pair<ILogicalOperator, LogicalVariable>(opLim, null);
 +    }
 +
 +    protected AbstractFunctionCallExpression createComparisonExpression(OperatorType t) {
 +        FunctionIdentifier fi = operatorTypeToFunctionIdentifier(t);
 +        IFunctionInfo finfo = FunctionUtil.getFunctionInfo(fi);
 +        return new ScalarFunctionCallExpression(finfo);
 +    }
 +
 +    private FunctionIdentifier operatorTypeToFunctionIdentifier(OperatorType t) {
 +        switch (t) {
 +            case EQ: {
 +                return AlgebricksBuiltinFunctions.EQ;
 +            }
 +            case NEQ: {
 +                return AlgebricksBuiltinFunctions.NEQ;
 +            }
 +            case GT: {
 +                return AlgebricksBuiltinFunctions.GT;
 +            }
 +            case GE: {
 +                return AlgebricksBuiltinFunctions.GE;
 +            }
 +            case LT: {
 +                return AlgebricksBuiltinFunctions.LT;
 +            }
 +            case LE: {
 +                return AlgebricksBuiltinFunctions.LE;
 +            }
 +            default: {
 +                throw new IllegalStateException();
 +            }
 +        }
 +    }
 +
 +    protected AbstractFunctionCallExpression createFunctionCallExpressionForBuiltinOperator(OperatorType t)
 +            throws AsterixException {
 +
 +        FunctionIdentifier fid = null;
 +        switch (t) {
 +            case PLUS: {
 +                fid = AlgebricksBuiltinFunctions.NUMERIC_ADD;
 +                break;
 +            }
 +            case MINUS: {
 +                fid = AsterixBuiltinFunctions.NUMERIC_SUBTRACT;
 +                break;
 +            }
 +            case MUL: {
 +                fid = AsterixBuiltinFunctions.NUMERIC_MULTIPLY;
 +                break;
 +            }
 +            case DIV: {
 +                fid = AsterixBuiltinFunctions.NUMERIC_DIVIDE;
 +                break;
 +            }
 +            case MOD: {
 +                fid = AsterixBuiltinFunctions.NUMERIC_MOD;
 +                break;
 +            }
 +            case IDIV: {
 +                fid = AsterixBuiltinFunctions.NUMERIC_IDIV;
 +                break;
 +            }
 +            case CARET: {
 +                fid = AsterixBuiltinFunctions.CARET;
 +                break;
 +            }
 +            case AND: {
 +                fid = AlgebricksBuiltinFunctions.AND;
 +                break;
 +            }
 +            case OR: {
 +                fid = AlgebricksBuiltinFunctions.OR;
 +                break;
 +            }
 +            case FUZZY_EQ: {
 +                fid = AsterixBuiltinFunctions.FUZZY_EQ;
 +                break;
 +            }
 +
 +            default: {
 +                throw new NotImplementedException("Operator " + t + " is not yet implemented");
 +            }
 +        }
 +        return new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(fid));
 +    }
 +
 +    private static boolean hasOnlyChild(ILogicalOperator parent, Mutable<ILogicalOperator> childCandidate) {
 +        List<Mutable<ILogicalOperator>> inp = parent.getInputs();
 +        if (inp == null || inp.size() != 1) {
 +            return false;
 +        }
 +        return inp.get(0) == childCandidate;
 +    }
 +
    /**
     * Translates a language expression into a logical expression plus the plan
     * reference it should be evaluated over. Variables and literals translate
     * directly; "flat" expressions reuse the assign produced by their visitor;
     * everything else is wrapped in (or kept as) a subplan whose bottom is
     * re-bound to the incoming plan.
     *
     * @param expr the language expression to translate
     * @param topOpRef reference to the current top of the plan under construction
     * @return the translated expression and the (possibly new) top operator reference
     * @throws AsterixException if translation of a sub-expression fails
     */
    protected Pair<ILogicalExpression, Mutable<ILogicalOperator>> langExprToAlgExpression(Expression expr,
            Mutable<ILogicalOperator> topOpRef) throws AsterixException {
        switch (expr.getKind()) {
            case VARIABLE_EXPRESSION: {
                VariableReferenceExpression ve = new VariableReferenceExpression(
                        context.getVar(((VariableExpr) expr).getVar().getId()));
                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(ve, topOpRef);
            }
            case LITERAL_EXPRESSION: {
                LiteralExpr val = (LiteralExpr) expr;
                return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(new ConstantExpression(
                        new AsterixConstantValue(ConstantHelper.objectFromLiteral(val.getValue()))), topOpRef);
            }
            default: {
                if (expressionNeedsNoNesting(expr)) {
                    // The visitor produced a single assign; unwrap its expression
                    // and hand back the assign's input as the new top.
                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, topOpRef);
                    ILogicalExpression exp = ((AssignOperator) p.first).getExpressions().get(0).getValue();
                    return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(exp, p.first.getInputs().get(0));
                } else {
                    Mutable<ILogicalOperator> srcRef = new MutableObject<ILogicalOperator>();
                    Pair<ILogicalOperator, LogicalVariable> p = expr.accept(this, srcRef);
                    if (p.first.getOperatorTag() == LogicalOperatorTag.SUBPLAN) {
                        // Already a subplan: splice it on top of the current plan.
                        if (topOpRef.getValue() != null) {
                            srcRef.setValue(topOpRef.getValue());
                        } else {
                            // Re-binds the bottom operator reference to {@code topOpRef}.
                            rebindBottomOpRef(p.first, srcRef, topOpRef);
                        }
                        Mutable<ILogicalOperator> top2 = new MutableObject<ILogicalOperator>(p.first);
                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(
                                new VariableReferenceExpression(p.second), top2);
                    } else {
                        // Wrap the translated plan in a fresh subplan fed by a
                        // nested-tuple-source rooted at the current plan.
                        SubplanOperator s = new SubplanOperator();
                        s.getInputs().add(topOpRef);
                        srcRef.setValue(new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(s)));
                        Mutable<ILogicalOperator> planRoot = new MutableObject<ILogicalOperator>(p.first);
                        s.setRootOp(planRoot);
                        return new Pair<ILogicalExpression, Mutable<ILogicalOperator>>(
                                new VariableReferenceExpression(p.second), new MutableObject<ILogicalOperator>(s));
                    }
                }
            }
        }
    }
 +
 +    protected Pair<ILogicalOperator, LogicalVariable> aggListifyForSubquery(LogicalVariable var,
 +            Mutable<ILogicalOperator> opRef, boolean bProject) {
 +        AggregateFunctionCallExpression funAgg = AsterixBuiltinFunctions.makeAggregateFunctionExpression(
 +                AsterixBuiltinFunctions.LISTIFY, new ArrayList<Mutable<ILogicalExpression>>());
 +        funAgg.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
 +
 +        LogicalVariable varListified = context.newSubplanOutputVar();
 +        AggregateOperator agg = new AggregateOperator(mkSingletonArrayList(varListified),
 +                (List) mkSingletonArrayList(new MutableObject<ILogicalExpression>(funAgg)));
 +        agg.getInputs().add(opRef);
 +        ILogicalOperator res;
 +        if (bProject) {
 +            ProjectOperator pr = new ProjectOperator(varListified);
 +            pr.getInputs().add(new MutableObject<ILogicalOperator>(agg));
 +            res = pr;
 +        } else {
 +            res = agg;
 +        }
 +        return new Pair<ILogicalOperator, LogicalVariable>(res, varListified);
 +    }
 +
    /**
     * Translates a homogeneous AND/OR chain into a single n-ary call of the
     * corresponding builtin function over all nOps + 1 operands.
     *
     * @param op the operator expression whose operators are all AND or all OR
     * @param tupSource the plan producing the input tuples
     * @return an assign computing the n-ary conjunction/disjunction and its variable
     * @throws AsterixException if an operand fails to translate, or
     *         TranslationException if the chain mixes AND and OR
     */
    protected Pair<ILogicalOperator, LogicalVariable> visitAndOrOperator(OperatorExpr op,
            Mutable<ILogicalOperator> tupSource) throws AsterixException {
        List<OperatorType> ops = op.getOpList();
        int nOps = ops.size();

        List<Expression> exprs = op.getExprList();

        Mutable<ILogicalOperator> topOp = tupSource;

        OperatorType opLogical = ops.get(0);
        AbstractFunctionCallExpression f = createFunctionCallExpressionForBuiltinOperator(opLogical);

        // There is one more operand than operators, hence i <= nOps.
        for (int i = 0; i <= nOps; i++) {
            Pair<ILogicalExpression, Mutable<ILogicalOperator>> p = langExprToAlgExpression(exprs.get(i), topOp);
            topOp = p.second;
            // now look at the operator
            if (i < nOps) {
                if (ops.get(i) != opLogical) {
                    throw new TranslationException(
                            "Unexpected operator " + ops.get(i) + " in an OperatorExpr starting with " + opLogical);
                }
            }
            f.getArguments().add(new MutableObject<ILogicalExpression>(p.first));
        }

        LogicalVariable assignedVar = context.newVar();
        AssignOperator a = new AssignOperator(assignedVar, new MutableObject<ILogicalExpression>(f));
        a.getInputs().add(topOp);

        return new Pair<ILogicalOperator, LogicalVariable>(a, assignedVar);

    }
 +
 +    protected boolean expressionNeedsNoNesting(Expression expr) {
 +        Kind k = expr.getKind();
 +        return (k == Kind.LITERAL_EXPRESSION) || (k == Kind.LIST_CONSTRUCTOR_EXPRESSION)
 +                || (k == Kind.RECORD_CONSTRUCTOR_EXPRESSION) || (k == Kind.VARIABLE_EXPRESSION)
 +                || (k == Kind.CALL_EXPRESSION) || (k == Kind.OP_EXPRESSION) || (k == Kind.FIELD_ACCESSOR_EXPRESSION)
 +                || (k == Kind.INDEX_ACCESSOR_EXPRESSION) || (k == Kind.UNARY_EXPRESSION) || (k == Kind.IF_EXPRESSION);
 +    }
 +
 +    protected <T> List<T> mkSingletonArrayList(T item) {
 +        ArrayList<T> array = new ArrayList<T>(1);
 +        array.add(item);
 +        return array;
 +    }
 +
 +    protected ILogicalExpression makeUnnestExpression(ILogicalExpression expr) {
 +        List<Mutable<ILogicalExpression>> argRefs = new ArrayList<>();
 +        argRefs.add(new MutableObject<ILogicalExpression>(expr));
 +        switch (expr.getExpressionTag()) {
 +            case VARIABLE: {
 +                return new UnnestingFunctionCallExpression(
 +                        FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION), argRefs);
 +            }
 +            case FUNCTION_CALL: {
 +                AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) expr;
 +                if (fce.getKind() == FunctionKind.UNNEST) {
 +                    return expr;
 +                } else {
 +                    return new UnnestingFunctionCallExpression(
 +                            FunctionUtil.getFunctionInfo(AsterixBuiltinFunctions.SCAN_COLLECTION), argRefs);
 +                }
 +            }
 +            default: {
 +                return expr;
 +            }
 +        }
 +    }
 +
 +    private boolean rebindBottomOpRef(ILogicalOperator currentOp, Mutable<ILogicalOperator> opRef,
 +            Mutable<ILogicalOperator> replacementOpRef) {
 +        int index = 0;
 +        for (Mutable<ILogicalOperator> childRef : currentOp.getInputs()) {
 +            if (childRef == opRef) {
 +                currentOp.getInputs().set(index, replacementOpRef);
 +                return true;
 +            } else {
 +                if (rebindBottomOpRef(childRef.getValue(), opRef, replacementOpRef)) {
 +                    return true;
 +                }
 +            }
 +            ++index;
 +        }
 +        return false;
 +    }
 +
 +    /**
 +     * Eliminate shared operator references in a query plan.
 +     * Deep copy a new query plan subtree whenever there is a shared operator reference.
 +     *
 +     * @param plan,
 +     *            the query plan.
 +     * @throws AsterixException
 +     */
 +    private void eliminateSharedOperatorReferenceForPlan(ILogicalPlan plan) throws AsterixException {
 +        for (Mutable<ILogicalOperator> opRef : plan.getRoots()) {
 +            Set<Mutable<ILogicalOperator>> opRefSet = new HashSet<>();
 +            eliminateSharedOperatorReference(opRef, opRefSet);
 +        }
 +    }
 +
    /**
     * Eliminate shared operator references in a query plan rooted at <code>currentOpRef.getValue()</code>.
     * Deep copy a new query plan subtree whenever there is a shared operator reference.
     *
     * @param currentOpRef,
     *            the operator reference to consider
     * @param opRefSet,
     *            the set storing seen operator references so far.
     * @return a mapping that maps old variables to new variables, for the ancestors of
     *         <code>currentOpRef</code> to replace variables properly.
     * @throws AsterixException
     */
    private Map<LogicalVariable, LogicalVariable> eliminateSharedOperatorReference(
            Mutable<ILogicalOperator> currentOpRef, Set<Mutable<ILogicalOperator>> opRefSet) throws AsterixException {
        try {
            opRefSet.add(currentOpRef);
            AbstractLogicalOperator currentOperator = (AbstractLogicalOperator) currentOpRef.getValue();

            // Recursively eliminates shared references in nested plans.
            if (currentOperator.hasNestedPlans()) {
                // Since a nested plan tree itself can never be shared with another nested plan tree in
                // another operator, the operation called in the if block does not need to replace
                // any variables further for <code>currentOpRef.getValue()</code> nor its ancestor.
                AbstractOperatorWithNestedPlans opWithNestedPlan = (AbstractOperatorWithNestedPlans) currentOperator;
                for (ILogicalPlan plan : opWithNestedPlan.getNestedPlans()) {
                    for (Mutable<ILogicalOperator> rootRef : plan.getRoots()) {
                        Set<Mutable<ILogicalOperator>> nestedOpRefSet = new HashSet<>();
                        eliminateSharedOperatorReference(rootRef, nestedOpRefSet);
                    }
                }
            }

            int childIndex = 0;
            Map<LogicalVariable, LogicalVariable> varMap = new HashMap<>();
            for (Mutable<ILogicalOperator> childRef : currentOperator.getInputs()) {
                if (opRefSet.contains(childRef)) {
                    // There is a shared operator reference in the query plan.
                    // Deep copies the child plan.
                    LogicalOperatorDeepCopyWithNewVariablesVisitor visitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(
                            context, null);
                    ILogicalOperator newChild = childRef.getValue().accept(visitor, null);
                    Map<LogicalVariable, LogicalVariable> cloneVarMap = visitor.getInputToOutputVariableMapping();

                    // Substitute variables according to the deep copy which generates new variables.
                    VariableUtilities.substituteVariables(currentOperator, cloneVarMap, null);
                    varMap.putAll(cloneVarMap);

                    // Sets the new child.
                    childRef = new MutableObject<ILogicalOperator>(newChild);
                    currentOperator.getInputs().set(childIndex, childRef);
                }

                // Recursively eliminate shared operator reference for the operator subtree,
                // even if it is a deep copy of some other one.
                Map<LogicalVariable, LogicalVariable> childVarMap = eliminateSharedOperatorReference(childRef,
                        opRefSet);
                // Substitute variables according to the new subtree.
                VariableUtilities.substituteVariables(currentOperator, childVarMap, null);

                // Updates mapping like <$a, $b> in varMap to <$a, $c>, where there is a mapping <$b, $c>
                // in childVarMap.
                for (Map.Entry<LogicalVariable, LogicalVariable> entry : varMap.entrySet()) {
                    LogicalVariable newVar = childVarMap.get(entry.getValue());
                    if (newVar != null) {
                        entry.setValue(newVar);
                    }
                }
                varMap.putAll(childVarMap);
                ++childIndex;
            }

            // Only retain live variables for parent operators to substitute variables.
            // (retainAll on values() mutates varMap in place.)
            Set<LogicalVariable> liveVars = new HashSet<>();
            VariableUtilities.getLiveVariables(currentOperator, liveVars);
            varMap.values().retainAll(liveVars);
            return varMap;
        } catch (AlgebricksException e) {
            // Wrap lower-layer failures in the translator's exception type.
            throw new AsterixException(e);
        }
    }
 +
 +    /**
 +     * Constructs a subplan operator for a branch in a 

<TRUNCATED>


[50/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
Move merged files


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/d3d24af4
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/d3d24af4
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/d3d24af4

Branch: refs/heads/master
Commit: d3d24af45eb18db3829bbf2acd0545afdfe2645a
Parents: e928b6a
Author: Ian Maxon <im...@apache.org>
Authored: Wed Apr 6 19:58:48 2016 -0700
Committer: Ian Maxon <im...@apache.org>
Committed: Wed Apr 6 19:58:48 2016 -0700

----------------------------------------------------------------------
 .../impls/AbstractLSMIndexOperationContext.java | 41 ++++++++++++++++++++
 .../impls/AbstractLSMIndexOperationContext.java | 41 --------------------
 2 files changed, 41 insertions(+), 41 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3d24af4/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
new file mode 100644
index 0000000..3b907c3
--- /dev/null
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.storage.am.lsm.common.impls;
+
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+
+public abstract class AbstractLSMIndexOperationContext implements ILSMIndexOperationContext {
+
+    private boolean accessingComponents = false;
+
+    @Override
+    public boolean isAccessingComponents() {
+        return accessingComponents;
+    }
+
+    @Override
+    public void setAccessingComponents(boolean accessingComponents) {
+        this.accessingComponents = accessingComponents;
+    }
+
+    @Override
+    public void reset() {
+        accessingComponents = false;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d3d24af4/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
deleted file mode 100644
index 3b907c3..0000000
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.storage.am.lsm.common.impls;
-
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
-
-public abstract class AbstractLSMIndexOperationContext implements ILSMIndexOperationContext {
-
-    private boolean accessingComponents = false;
-
-    @Override
-    public boolean isAccessingComponents() {
-        return accessingComponents;
-    }
-
-    @Override
-    public void setAccessingComponents(boolean accessingComponents) {
-        this.accessingComponents = accessingComponents;
-    }
-
-    @Override
-    public void reset() {
-        accessingComponents = false;
-    }
-}


[28/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16_null/sum_int16_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16_null/sum_int16_null.3.query.sqlpp
index 7b86f76,0000000..e57c3f4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16_null/sum_int16_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int16_null/sum_int16_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x.int16Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32/sum_int32.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32/sum_int32.3.query.sqlpp
index 9c136e2,0000000..6b37fec
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32/sum_int32.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32/sum_int32.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x
 +    from  [test.int32('1'),test.int32('2'),test.int32('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32_null/sum_int32_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32_null/sum_int32_null.3.query.sqlpp
index 61984d7,0000000..3c6841e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32_null/sum_int32_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int32_null/sum_int32_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x.int32Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64/sum_int64.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64/sum_int64.3.query.sqlpp
index d35ab0f,0000000..dca4d44
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64/sum_int64.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64/sum_int64.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x
 +    from  [test.int64('1'),test.int64('2'),test.int64('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64_null/sum_int64_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64_null/sum_int64_null.3.query.sqlpp
index 189e9f3,0000000..857e15f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64_null/sum_int64_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int64_null/sum_int64_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x.int64Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8/sum_int8.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8/sum_int8.3.query.sqlpp
index 536a03f,0000000..25ed1a4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8/sum_int8.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8/sum_int8.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x
 +    from  [test.int8('1'),test.int8('2'),test.int8('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8_null/sum_int8_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8_null/sum_int8_null.3.query.sqlpp
index 98922f8,0000000..473bb1a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8_null/sum_int8_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_int8_null/sum_int8_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element x.int8Field
 +    from  Numeric as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_mixed/sum_mixed.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_mixed/sum_mixed.3.query.sqlpp
index f4e1265,0000000..b8c98ef
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_mixed/sum_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_mixed/sum_mixed.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Run sum over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Jun 2nd 2013
 +*/
 +
- select element sum((
++select element coll_sum((
 +    select element x
 +    from  [float('2.0'),'hello world',93847382783847382,date('2013-01-01')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
index 2405389,0000000..66c4b53
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.1.ddl.sqlpp
@@@ -1,39 -1,0 +1,39 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   sum() aggregate function must return the numeric sum, when non null values are given as input to sum().
++ * Description      :   coll_sum() aggregate function must return the numeric sum, when non null values are given as input to coll_sum().
 + *                  :   Get the sum for those tuples which are non null for salary fields.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 +{
 +  id : int64,
 +  sal : int64?
 +}
 +
 +create  table tdst(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
index a3ac94f,0000000..fdc8b57
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.2.update.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   sum() aggregate function must return the numeric sum, when non null values are given as input to sum().
++ * Description      :   coll_sum() aggregate function must return the numeric sum, when non null values are given as input to coll_sum().
 + *                  :   Get the sum for those tuples which are non null for salary fields.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
 +insert into tdst
 +select element {'id':123,'sal':1000};
 +insert into tdst
 +select element {'id':113,'sal':2000};
 +insert into tdst
 +select element {'id':163,'sal':3000};
 +insert into tdst
 +select element {'id':161,'sal':4000};
 +insert into tdst
 +select element {'id':173,'sal':5000};
 +insert into tdst
 +select element {'id':183,'sal':null};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
index 4734f53,0000000..0664250
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_null-with-pred/sum_null-with-pred.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   sum() aggregate function must return the numeric sum, when non null values are given as input to sum().
++ * Description      :   coll_sum() aggregate function must return the numeric sum, when non null values are given as input to coll_sum().
 + *                  :   Get the sum for those tuples which are non null for salary fields.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
 +set "import-private-functions" "true";
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element l.sal
 +    from  tdst as l
 +    where test.not(test."is-null"(l.sal))
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
index ed8b33c,0000000..de795e7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.1.ddl.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   Add numeric values with a null value, sum() aggregate function must return null.
++ * Description      :   Add numeric values with a null value, coll_sum() aggregate function must return null.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 +{
 +  id : int32,
 +  sal : int32?
 +}
 +
 +create  table tdst(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.2.update.sqlpp
index 7f6846b,0000000..10f6c7e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.2.update.sqlpp
@@@ -1,39 -1,0 +1,39 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   Add numeric values with a null value, sum() aggregate function must return null.
++ * Description      :   Add numeric values with a null value, coll_sum() aggregate function must return null.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
 +insert into tdst
 +select element {'id':123,'sal':345};
 +insert into tdst
 +select element {'id':113,'sal':335};
 +insert into tdst
 +select element {'id':163,'sal':315};
 +insert into tdst
 +select element {'id':161,'sal':365};
 +insert into tdst
 +select element {'id':173,'sal':385};
 +insert into tdst
 +select element {'id':183,'sal':null};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.3.query.sqlpp
index aa3c3da,0000000..7b771d7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate/sum_numeric_null/sum_numeric_null.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description      :   Add numeric values with a null value, sum() aggregate function must return null.
++ * Description      :   Add numeric values with a null value, coll_sum() aggregate function must return null.
 + * Expected result  :   Success
 + * Date             :   July 20th 2012
 + */
 +
 +use test;
 +
 +
- select element test.sum((
++select element test.coll_sum((
 +    select element l.sal
 +    from  tdst as l
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cross-dataverse/cross-dv03/cross-dv03.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cross-dataverse/cross-dv03/cross-dv03.3.query.sqlpp
index c43f41f,0000000..4e09b53
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cross-dataverse/cross-dv03/cross-dv03.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/cross-dataverse/cross-dv03/cross-dv03.3.query.sqlpp
@@@ -1,33 -1,0 +1,33 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Test cross dataverse functionality
 + *              : use dataverse statement is now optional.
 + *              : Use fully qualified names to create datasets, types.
 + *              : drop datasets using fully qualified names
 + *              : Query metadata to verify datasets are dropped.
 + * Expected Res : Success
 + * Date         : 28th Aug 2012
 + */
 +
- select element count((
++select element coll_count((
 +    select element l
 +    from  "Metadata.Dataset" as l
 +    where ((l.DataverseName = 'student') or (l.DataverseName = 'teacher'))
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/freq-clerk/freq-clerk.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/freq-clerk/freq-clerk.3.query.sqlpp
index 157efa5,0000000..8ffe029
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/freq-clerk/freq-clerk.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/freq-clerk/freq-clerk.3.query.sqlpp
@@@ -1,29 -1,0 +1,29 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
 +select element {'clerk':clerk,'ordercount':count}
 +from  CustomerOrders as c,
 +      c.orders as o
 +group by o.clerk as clerk
- with  count as test.count(o)
++with  count as count(o)
 +order by count,clerk desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_06/join_q_06.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_06/join_q_06.3.query.sqlpp
index c9e9876,0000000..413ab84
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_06/join_q_06.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_06/join_q_06.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE test;
 +
 +
 +WITH customer AS (SELECT ELEMENT c FROM Customers c)
 +
 +SELECT c.name AS cust_name,
 +       c.cashBack AS cust_cashBack
- FROM customer c JOIN [min((SELECT ELEMENT c.cashBack FROM customer c))] as min_cashBack
++FROM customer c JOIN [coll_min((SELECT ELEMENT c.cashBack FROM customer c))] as min_cashBack
 +     ON c.cashBack = min_cashBack
 +ORDER BY c.cid, c.name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_07/join_q_07.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_07/join_q_07.3.query.sqlpp
index 05c71c2,0000000..5f4f07f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_07/join_q_07.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/custord/join_q_07/join_q_07.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +/*
 + * This query is expected to fail because variable c
 + * is undefined for subquery "(SELECT ELEMENT c.cashBack FROM c c)".
 + */
 +
 +USE test;
 +
 +
 +WITH customer AS (SELECT ELEMENT c FROM Customers c)
 +
 +SELECT c.name AS cust_name,
 +       c.cashBack AS cust_cashBack
- FROM customer c JOIN [min((SELECT ELEMENT c.cashBack FROM c c))] as min_cashBack
++FROM customer c JOIN [coll_min((SELECT ELEMENT c.cashBack FROM c c))] as min_cashBack
 +     ON c.cashBack = min_cashBack
 +ORDER BY c.cid, c.name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dapd/q2/q2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dapd/q2/q2.3.query.sqlpp
index 3b64539,0000000..f88e846
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dapd/q2/q2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dapd/q2/q2.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
 +select element {'sig_id':sig_id,'total_count':sig_sponsorship_count,'chapter_breakdown':by_chapter}
 +from  Event as event,
 +      event.sponsoring_sigs as sponsor
 +with  es as {'event':event,'sponsor':sponsor}
 +group by sponsor.sig_id as sig_id
- with  sig_sponsorship_count as test.count(es),
++with  sig_sponsorship_count as count(es),
 +      by_chapter as (
-       select element {'chapter_name':chapter_name,'escount':test.count(e)}
++      select element {'chapter_name':chapter_name,'escount':count(e)}
 +      from  es as e
 +      group by e.sponsor.chapter_name as chapter_name
 +  )
 +order by sig_sponsorship_count desc
 +limit 5
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dml/query-issue382/query-issue382.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dml/query-issue382/query-issue382.3.query.sqlpp
index a471812,0000000..c49b436
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dml/query-issue382/query-issue382.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/dml/query-issue382/query-issue382.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use SocialNetworkData;
 +
 +
- select element SocialNetworkData.count((
++select element SocialNetworkData.coll_count((
 +    select element h
 +    from  HandbookUsers as h
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-indexing/rc-format/rc-format.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-indexing/rc-format/rc-format.1.ddl.sqlpp
index b14f3cb,0000000..4736dee
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-indexing/rc-format/rc-format.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/external-indexing/rc-format/rc-format.1.ddl.sqlpp
@@@ -1,43 -1,0 +1,49 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Create an external dataset that contains records stored with rc file format.
 + *                Build an index over the external dataset age attribute
 + *                Perform a query over the dataset using the index.
 + * Expected Res : Success
 + * Date         : 3rd Jan 2014
 +*/
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.EmployeeType as
 + closed {
 +  id : int64,
 +  name : string,
 +  age : int64
 +}
 +
- create external  table EmployeeDataset(EmployeeType) using "hdfs"(("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/external-indexing-test.rc"),("input-format"="rc-input-format"),("format"="binary"),("parser"="hive-parser"),("hive-serde"="org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"));
++create external  table EmployeeDataset(EmployeeType) using "hdfs"(
++("hdfs"="hdfs://127.0.0.1:31888"),
++("path"="/asterix/external-indexing-test.rc"),
++("input-format"="rc-input-format"),
++("format"="hdfs-writable"),
++("parser"="hive-parser"),
++("hive-serde"="org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe"));
 +
 +create  index EmployeeAgeIdx  on EmployeeDataset (age) type btree;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/failure/q01_pricing_summary_report_failure/q01_pricing_summary_report_failure.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/failure/q01_pricing_summary_report_failure/q01_pricing_summary_report_failure.3.query.sqlpp
index 38d2c9d,0000000..cac4a08
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/failure/q01_pricing_summary_report_failure/q01_pricing_summary_report_failure.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/failure/q01_pricing_summary_report_failure/q01_pricing_summary_report_failure.3.query.sqlpp
@@@ -1,49 -1,0 +1,49 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.sum((
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.coll_sum((
 +        select element i.l_quantity
 +        from  l as i
-     )),'sum_base_price':tpch.sum((
++    )),'sum_base_price':tpch.coll_sum((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'sum_disc_price':tpch.sum((
++    )),'sum_disc_price':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount))
 +        from  l as i
-     )),'sum_charge':tpch.sum((
++    )),'sum_charge':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +        from  l as i
-     )),'ave_qty':tpch.avg((
++    )),'ave_qty':tpch.coll_avg((
 +        select element i.l_quantity
 +        from  l as i
-     )),'ave_price':tpch.avg((
++    )),'ave_price':tpch.coll_avg((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'ave_disc':tpch.avg((
++    )),'ave_disc':tpch.coll_avg((
 +        select element i.l_discount
 +        from  l as i
-     )),'count_order':tpch.count(l)}
++    )),'count_order':tpch.coll_count(l)}
 +from  LineItem as l
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_05/feeds_05.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_05/feeds_05.3.query.sqlpp
index a7303aa,0000000..ba6c676
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_05/feeds_05.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_05/feeds_05.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Create a feed dataset that uses the synthetic feed simulator adapter.
 +
 +use feeds;
 +
 +
- if ((feeds.count((
++if ((feeds.coll_count((
 +      select element x
 +      from  SyntheticTweets as x
 +  )) > 0))
 +then 1
 +else 0;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_07/feeds_07.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_07/feeds_07.3.query.sqlpp
index 64e1181,0000000..7760608
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_07/feeds_07.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_07/feeds_07.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Create a feed using the synthetic feed simulator adapter.
 +
 +use feeds;
 +
 +
- if ((feeds.count((
++if ((feeds.coll_count((
 +      select element x
 +      from  SyntheticTweets as x
 +  )) > 0))
 +then 1
 +else 0;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_08/feeds_08.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_08/feeds_08.3.query.sqlpp
index 64e1181,0000000..7760608
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_08/feeds_08.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_08/feeds_08.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Create a feed using the synthetic feed simulator adapter.
 +
 +use feeds;
 +
 +
- if ((feeds.count((
++if ((feeds.coll_count((
 +      select element x
 +      from  SyntheticTweets as x
 +  )) > 0))
 +then 1
 +else 0;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_09/feeds_09.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_09/feeds_09.3.query.sqlpp
index e5d9ea5,0000000..3b4fbad
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_09/feeds_09.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_09/feeds_09.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Create a feed using the synthetic feed simulator adapter.
 +
 +use feeds_09;
 +
 +
- if ((feeds_09.count((
++if ((feeds_09.coll_count((
 +      select element x
 +      from  SyntheticTweets as x
 +  )) > 0))
 +then 1
 +else 0;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_10/feeds_10.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_10/feeds_10.3.query.sqlpp
index 350a632,0000000..b6d486c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_10/feeds_10.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/feeds/feeds_10/feeds_10.3.query.sqlpp
@@@ -1,29 -1,0 +1,29 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : Create a dataset with a secondary btree index.
 +
 +use feeds_10;
 +
 +
- select element feeds_10.count((
++select element feeds_10.coll_count((
 +    select element x
 +    from  Tweets as x
 +    order by x.id
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby01/grpby01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby01/grpby01.3.query.sqlpp
index 8f778b4,0000000..5b44875
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby01/grpby01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby01/grpby01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description      :  Test group by clause of the FLWOR expression
 + * Expected Result  :  Success
 + * Date             :  31st July 2012
 + */
 +
- select element {'store-number':strNum,'total-qty':sum((
++select element {'store-number':strNum,'total-qty':coll_sum((
 +        select element l.qty
 +        from  sales as l
 +    ))}
 +from  [{'storeno':'S101','itemno':'P78395','qty':125},{'storeno':'S101','itemno':'P71395','qty':135},{'storeno':'S102','itemno':'P78395','qty':225},{'storeno':'S103','itemno':'P78345','qty':105},{'storeno':'S104','itemno':'P71395','qty':115},{'storeno':'S105','itemno':'P74395','qty':120}] as sales
 +group by sales.storeno as strNum
 +order by strNum desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby02/grpby02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby02/grpby02.3.query.sqlpp
index 8f778b4,0000000..5b44875
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby02/grpby02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/flwor/grpby02/grpby02.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description      :  Test group by clause of the FLWOR expression
 + * Expected Result  :  Success
 + * Date             :  31st July 2012
 + */
 +
- select element {'store-number':strNum,'total-qty':sum((
++select element {'store-number':strNum,'total-qty':coll_sum((
 +        select element l.qty
 +        from  sales as l
 +    ))}
 +from  [{'storeno':'S101','itemno':'P78395','qty':125},{'storeno':'S101','itemno':'P71395','qty':135},{'storeno':'S102','itemno':'P78395','qty':225},{'storeno':'S103','itemno':'P78345','qty':105},{'storeno':'S104','itemno':'P71395','qty':115},{'storeno':'S105','itemno':'P74395','qty':120}] as sales
 +group by sales.storeno as strNum
 +order by strNum desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-1_1/dblp-1_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-1_1/dblp-1_1.3.query.sqlpp
index e52dcb4,0000000..54f431b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-1_1/dblp-1_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-1_1/dblp-1_1.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use fuzzyjoin;
 +
 +
 +set "import-private-functions" "true";
 +
 +select element tokenGroupped
 +from  DBLP as paper,
 +      fuzzyjoin."counthashed-word-tokens"(paper.title) as token
 +group by token as tokenGroupped
- order by fuzzyjoin.count(paper),tokenGroupped
++order by count(paper),tokenGroupped
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-2_5.3/dblp-2_5.3.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-2_5.3/dblp-2_5.3.3.query.sqlpp
index 8aad0db,0000000..2dca121
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-2_5.3/dblp-2_5.3.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/fuzzyjoin/dblp-2_5.3/dblp-2_5.3.3.query.sqlpp
@@@ -1,46 -1,0 +1,46 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use fuzzyjoin;
 +
 +
 +set "import-private-functions" "true";
 +
 +select element {'id':idDBLP,'len':lenDBLP,'tokens':tokensDBLP}
 +from  DBLP as paperDBLP
 +with  idDBLP as paperDBLP.id,
 +      tokensUnrankedDBLP as fuzzyjoin."counthashed-word-tokens"(paperDBLP.title),
 +      lenDBLP as fuzzyjoin.len(tokensUnrankedDBLP),
 +      tokensDBLP as (
 +      select element i
 +      from  tokensUnrankedDBLP as tokenUnranked,
 +            (
 +          select element tokenGrouped
 +          from  DBLP as paper,
 +                fuzzyjoin."counthashed-word-tokens"(paper.title) as token
 +          with  id as paper.id
 +          /* +hash */
 +          group by token as tokenGrouped
-           order by fuzzyjoin.count(id),tokenGrouped
++          order by count(id),tokenGrouped
 +      ) as tokenRanked at i
 +      where (tokenUnranked = tokenRanked)
 +      order by i
 +  )
 +order by idDBLP
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-01/core-01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-01/core-01.3.query.sqlpp
index 3e16b8c,0000000..dc7ac16
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-01/core-01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-01/core-01.3.query.sqlpp
@@@ -1,29 -1,0 +1,29 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +
 +FROM Employee e
 +GROUP BY e.deptno AS deptno GROUP AS g(e AS e)
 +SELECT ELEMENT {
 +  'deptno': deptno,
-   'avgpay': avg( (FROM g AS i SELECT ELEMENT i.e.salary) ),
++  'avgpay': coll_avg( (FROM g AS i SELECT ELEMENT i.e.salary) ),
 +  'workers': (FROM g AS i SELECT ELEMENT  {'name': i.e.name, 'salary': i.e.salary})
 +};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-02/core-02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-02/core-02.3.query.sqlpp
index f2ce8b7,0000000..70afc46
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-02/core-02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-02/core-02.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +FROM Employee e
 +    JOIN Incentive i ON e.job_category = i.job_category
 +    JOIN SuperStars s ON e.id = s.id
 +GROUP BY e.department_id AS deptId
 +GROUP AS eis(e AS e, i AS i, s AS s)
 +SELECT ELEMENT {
 +      'deptId': deptId,
-       'star_cost': sum( (FROM eis AS p SELECT ELEMENT p.e.salary + p.i.bonus) )
++      'star_cost': coll_sum( (FROM eis AS p SELECT ELEMENT p.e.salary + p.i.bonus) )
 +};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-03/core-02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-03/core-02.3.query.sqlpp
index d0ac45b,0000000..8e6b987
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-03/core-02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-03/core-02.3.query.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +FROM Employee e
 +    JOIN Incentive i ON e.job_category = i.job_category
 +    JOIN SuperStars s ON e.id = s.id
 +GROUP BY e.department_id AS deptId
 +GROUP AS eis(e AS e, i AS i, s AS s)
 +SELECT ELEMENT {
 +      'deptId': deptId,
-       'avgpay': avg( (FROM eis AS g SELECT ELEMENT g.e.salary + g.i.bonus) ),
++      'avgpay': coll_avg( (FROM eis AS g SELECT ELEMENT g.e.salary + g.i.bonus) ),
 +      'topstar_details':
 +           (
 +                FROM eis AS g
 +                SELECT ELEMENT {
 +                    'id': g.e.id,
 +                    'salary': g.e.salary,
 +                    'bonus': g.i.bonus
 +                }
 +                ORDER BY g.i.bonus DESC LIMIT 3
 +           )
 +};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-05/core-05.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-05/core-05.3.query.sqlpp
index 11c6c6f,0000000..e647ff0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-05/core-05.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/core-05/core-05.3.query.sqlpp
@@@ -1,47 -1,0 +1,47 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +SELECT ELEMENT {
 +        'Vehicle prices including 4 tires':
 +        (   FROM Car c JOIN Tire t ON c.tire_size = t.size
 +            GROUP BY c.tire_size AS tire_size GROUP AS g(c AS c, t AS t)
 +            SELECT ELEMENT {
 +                'tire_size': tire_size,
-                 'avg_total_price': avg(
++                'avg_total_price': coll_avg(
 +                                         (  FROM g AS g
 +                                            SELECT ELEMENT g.c.price + 4 * g.t.price
 +                                          )
 +                                       ),
 +                'combinations':
 +                            ( FROM g AS g
 +                              SELECT ELEMENT {
 +                                    'make': g.c.make,
 +                                    'model': g.c.model,
 +                                    'mfr': g.t.mfr,
 +                                    'brand': g.t.brand,
 +                                    'price': g.c.price + 4 * g.t.price
 +                               }
 +                               ORDER BY g.c.make, g.c.model, g.t.mfr, g.t.brand
 +                            )
 +              }
 +        )
 +    };
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-01/sugar-01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-01/sugar-01.3.query.sqlpp
index 79758e3,0000000..c695e87
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-01/sugar-01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-01/sugar-01.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +
 +FROM Employee e
 +GROUP BY e.deptno AS deptno GROUP AS g
- SELECT deptno AS deptno, avg(e.salary) AS avgpay,
++SELECT deptno AS deptno, coll_avg(e.salary) AS avgpay,
 +       (SELECT i.e.name AS name, i.e.salary AS salary FROM g AS i) AS workers;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-02/sugar-02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-02/sugar-02.3.query.sqlpp
index b488a32,0000000..e9c3326
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-02/sugar-02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-02/sugar-02.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +FROM Employee e
 +    JOIN Incentive i ON e.job_category = i.job_category
 +    JOIN SuperStars s ON e.id = s.id
 +GROUP BY e.department_id AS deptId
- SELECT deptId as deptId, sum(e.salary + i.bonus) AS star_cost;
++SELECT deptId as deptId, coll_sum(e.salary + i.bonus) AS star_cost;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-03/sugar-03.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-03/sugar-03.3.query.sqlpp
index 49dd2c0,0000000..c747382
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-03/sugar-03.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-03/sugar-03.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +FROM Employee e
 +    JOIN Incentive i ON e.job_category = i.job_category
 +    JOIN SuperStars s ON e.id = s.id
 +GROUP BY e.department_id AS deptId GROUP AS eis
 +SELECT deptId as deptId,
-     avg(e.salary + i.bonus) AS avgpay,
++    coll_avg(e.salary + i.bonus) AS avgpay,
 +    (FROM eis AS v
 +         SELECT v.e.id AS id, v.e.salary AS salary, v.i.bonus AS bonus
 +         ORDER BY v.i.bonus DESC LIMIT 3
 +    ) AS topstar_details
 +    ;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-05/sugar-05.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-05/sugar-05.3.query.sqlpp
index 1a7a2bf,0000000..45c3528
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-05/sugar-05.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/group-by/sugar-05/sugar-05.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE gby;
 +
 +SELECT ELEMENT {
 +        'Vehicle prices including 4 tires':
 +        (
 +            FROM Car c JOIN Tire t ON c.tire_size = t.size
 +            GROUP BY c.tire_size AS tire_size GROUP AS g
 +            SELECT tire_size AS tire_size,
-                    avg(c.price + 4 * t.price) AS avg_total_price,
++                   coll_avg(c.price + 4 * t.price) AS avg_total_price,
 +                   ( FROM g AS g
 +                     SELECT g.c.make AS make,
 +                            g.c.model AS model,
 +                            g.t.mfr AS mfr,
 +                            g.t.brand AS brand,
 +                            g.c.price + 4 * g.t.price AS price
 +                     ORDER BY g.c.make, g.c.model, g.t.mfr, g.t.brand
 +                   ) AS combinations
 +        )
 +    };

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_02/hdfs_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_02/hdfs_02.3.query.sqlpp
index 04c502a,0000000..c56d747
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_02/hdfs_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_02/hdfs_02.3.query.sqlpp
@@@ -1,34 -1,0 +1,34 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Create an external dataset that contains a tuples, the lines from a (*sequence*) file in HDFS.
 +                 Perform a word-count over the data in the dataset.
 +* Expected Res : Success
 +* Date         : 7th Jan 2013
 +*/
 +
 +use test;
 +
 +
- select element {'word':tok,'count':test.count(token)}
++select element {'word':tok,'count':test.coll_count(token)}
 +from  TextDataset as line,
 +      test."word-tokens"(line.content) as token
 +group by token as tok
 +order by tok
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_03/hdfs_03.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_03/hdfs_03.3.query.sqlpp
index a83f6b9,0000000..626c849
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_03/hdfs_03.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_03/hdfs_03.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Create an external dataset that contains a tuples, the lines from a large (35kb) text file in HDFS.
 +                 The input file is sufficiently large to guarantee that # of bytes > than internal buffer of size 8192.
 +                 This causes a record to span across the buffer size boundaries.
 +                 Perform a word-count over the data in the dataset.
 +* Expected Res : Success
 +* Date         : 7th Jan 2013
 +*/
 +
 +use test;
 +
 +
- select element {'word':tok,'count':test.count(token)}
++select element {'word':tok,'count':test.coll_count(token)}
 +from  TextDataset as line,
 +      test."word-tokens"(line.content) as token
 +group by token as tok
 +order by tok
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_shortcircuit/hdfs_shortcircuit.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_shortcircuit/hdfs_shortcircuit.3.query.sqlpp
index 158e312,0000000..d054927
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_shortcircuit/hdfs_shortcircuit.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hdfs/hdfs_shortcircuit/hdfs_shortcircuit.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Create an external dataset that contains a tuples, the lines from a (*sequence*) file in HDFS.
 +                 Perform a word-count over the data in the dataset.
 +                 The external dataset is set to perform local reads (but this is not checked)
 +* Expected Res : Success
 +* Date         : 6th Mar 2015
 +*/
 +
 +use test;
 +
 +
- select element {'word':tok,'count':test.count(token)}
++select element {'word':tok,'count':test.coll_count(token)}
 +from  TextDataset as line,
 +      test."word-tokens"(line.content) as token
 +group by token as tok
 +order by tok
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_6/issue_251_dataset_hint_6.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_6/issue_251_dataset_hint_6.3.query.sqlpp
index 8c3eff1,0000000..a51e09a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_6/issue_251_dataset_hint_6.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/hints/issue_251_dataset_hint_6/issue_251_dataset_hint_6.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 +* Description  : Create an external dataset that contains a tuples, the lines from a (*sequence*) file in HDFS.
 +                 Provide hint(cardinality) when creating the dataset.
 +                 Perform a word-count over the data in the dataset.
 +* Expected Res : Success
 +* Date         : 30th Jan 2013
 +*/
 +
 +use test;
 +
 +
- select element {'word':tok,'count':test.count(token)}
++select element {'word':tok,'count':test.coll_count(token)}
 +from  TextDataset as line,
 +      test."word-tokens"(line.content) as token
 +group by token as tok
 +order by tok
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/list/listify_03/listify_03.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/list/listify_03/listify_03.3.query.sqlpp
index 8f6444e,0000000..5e73b39
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/list/listify_03/listify_03.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/list/listify_03/listify_03.3.query.sqlpp
@@@ -1,34 -1,0 +1,34 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description      :  Test that a listify on a nullable type creates a homogeneous list of type ANY.
 + *                     Guards against regression to issue 186.
 + * Expected Result  :  Success
 + */
 +
 +use test;
 +
 +
- select element test.min(y)
++select element coll_min(y)
 +from  [1,2] as x
 +with  y as (
-       select element test.min(i)
++      select element coll_min(i)
 +      from  [[1,2,3],[10,20,30],[-2,-5,0]] as i
 +  )
 +;



[32/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.3.query.sqlpp
index 47e1388,0000000..104ce93
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_01/avg_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-avg aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-avg aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-avg"((
++select element test."coll_sql-avg"((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.1.ddl.sqlpp
index 360c6db,0000000..345dc40
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.1.ddl.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-avg aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-avg aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 + closed {
 +  id : int64,
 +  val : double
 +}
 +
 +create  table Test(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.2.update.sqlpp
index c0377fb,0000000..ca013ae
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-avg aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-avg aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.3.query.sqlpp
index 2163d70,0000000..e8a4500
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_empty_02/avg_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-avg aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-avg aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-avg"((
++select element test."coll_sql-avg"((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float/avg_float.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float/avg_float.3.query.sqlpp
index 211efca,0000000..fd5e040
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float/avg_float.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float/avg_float.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-avg"((
++select element test."coll_sql-avg"((
 +    select element x
 +    from  [test.float('1'),test.float('2'),test.float('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float_null/avg_float_nu.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float_null/avg_float_nu.3.query.sqlpp
index e2005b5,0000000..ad9089c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float_null/avg_float_nu.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_float_null/avg_float_nu.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test."sql-avg"((
++{'average':test."coll_sql-avg"((
 +    select element x.floatField
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16/avg_int16.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16/avg_int16.3.query.sqlpp
index 36ee77a,0000000..0733772
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16/avg_int16.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16/avg_int16.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-avg"((
++select element test."coll_sql-avg"((
 +    select element x
 +    from  [test.int16('1'),test.int16('2'),test.int16('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16_null/avg_int16_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16_null/avg_int16_null.3.query.sqlpp
index 727455a,0000000..b3f76ed
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16_null/avg_int16_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int16_null/avg_int16_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test."sql-avg"((
++{'average':test."coll_sql-avg"((
 +    select element x.int16Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32/avg_int32.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32/avg_int32.3.query.sqlpp
index f7837ea,0000000..cb91b1f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32/avg_int32.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32/avg_int32.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-avg"((
++select element test."coll_sql-avg"((
 +    select element x
 +    from  [test.int32('1'),test.int32('2'),test.int32('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32_null/avg_int32_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32_null/avg_int32_null.3.query.sqlpp
index 41faea7,0000000..90a8437
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32_null/avg_int32_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int32_null/avg_int32_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test."sql-avg"((
++{'average':test."coll_sql-avg"((
 +    select element x.int32Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64/avg_int64.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64/avg_int64.3.query.sqlpp
index 6694f52,0000000..0fb0b98
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64/avg_int64.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64/avg_int64.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-avg"((
++select element test."coll_sql-avg"((
 +    select element x
 +    from  [test.int64('1'),test.int64('2'),test.int64('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64_null/avg_int64_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64_null/avg_int64_null.3.query.sqlpp
index bc44cba,0000000..fd4902c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64_null/avg_int64_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int64_null/avg_int64_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test."sql-avg"((
++{'average':test."coll_sql-avg"((
 +    select element x.int64Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8/avg_int8.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8/avg_int8.3.query.sqlpp
index 7acc346,0000000..10e04d4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8/avg_int8.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8/avg_int8.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-avg"((
++select element test."coll_sql-avg"((
 +    select element x
 +    from  [test.int8('1'),test.int8('2'),test.int8('3')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8_null/avg_int8_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8_null/avg_int8_null.3.query.sqlpp
index 89f1dc9,0000000..8481f91
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8_null/avg_int8_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_int8_null/avg_int8_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'average':test."sql-avg"((
++{'average':test."coll_sql-avg"((
 +    select element x.int8Field
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
index 5378c3e,0000000..fff702e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.1.ddl.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
- * Description  : Run sql-avg over an ordered list with mixed types
++* Description  : Run coll_sql-avg over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Feb 7th 2014
 +*/
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp
index f02d73b,0000000..5132c56
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/avg_mixed/avg_mixed.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
- * Description  : Run sql-avg over an ordered list with mixed types
++* Description  : Run coll_sql-avg over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Feb 7th 2014
 +*/
 +
- select element "sql-avg"((
++select element "coll_sql-avg"((
 +    select element x
 +    from  [float('2.0'),'hello world',93847382783847382,date('2013-01-01')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_01/count_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_01/count_01.3.query.sqlpp
index 47ed1d8,0000000..bab28c3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_01/count_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_01/count_01.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-count"((
++select element test."coll_sql-count"((
 +    select element x
 +    from  [1,2,3] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.1.ddl.sqlpp
index 34b3e00,0000000..4682857
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.1.ddl.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-count aggregation correctly returns 0 for an empty stream,
++ * Description    : Tests that coll_sql-count aggregation correctly returns 0 for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.2.update.sqlpp
index b0903c6,0000000..dae5bfa
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-count aggregation correctly returns 0 for an empty stream,
++ * Description    : Tests that coll_sql-count aggregation correctly returns 0 for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.3.query.sqlpp
index 7038ffc,0000000..09e881e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_01/count_empty_01.3.query.sqlpp
@@@ -1,29 -1,0 +1,29 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-count aggregation correctly returns 0 for an empty stream,
++ * Description    : Tests that coll_sql-count aggregation correctly returns 0 for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
- select element "sql-count"((
++select element "coll_sql-count"((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.1.ddl.sqlpp
index 8450cbd,0000000..48286b2
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.1.ddl.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-count aggregation correctly returns 0 for an empty stream,
++ * Description    : Tests that coll_sql-count aggregation correctly returns 0 for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 + closed {
 +  id : int64,
 +  val : double
 +}
 +
 +create  table Test(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.2.update.sqlpp
index 2fe949f,0000000..df5c80a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-count aggregation correctly returns 0 for an empty stream,
++ * Description    : Tests that coll_sql-count aggregation correctly returns 0 for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.3.query.sqlpp
index a8cbd64,0000000..c9fdf7f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_empty_02/count_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-count aggregation correctly returns 0 for an empty stream,
++ * Description    : Tests that coll_sql-count aggregation correctly returns 0 for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-count"((
++select element test."coll_sql-count"((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_null/count_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_null/count_null.3.query.sqlpp
index afb7f99,0000000..cfe5457
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_null/count_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/count_null/count_null.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- {'sql-count':test."sql-count"((
++{'sql-count':test."coll_sql-count"((
 +    select element x.doubleField
 +    from  Numeric as x
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue395/issue395.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue395/issue395.3.query.sqlpp
index 080fd11,0000000..a6726e2
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue395/issue395.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue395/issue395.3.query.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use test;
 +
 +
- select element test."sql-count"((
++select element test."coll_sql-count"((
 +    select element l.name
 +    from  Employee as l
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_0/issue412_0.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_0/issue412_0.3.query.sqlpp
index 50aedcf,0000000..f17fc50
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_0/issue412_0.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_0/issue412_0.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- "sql-count"(['ASTERIX','Hyracks',null]);
++"coll_sql-count"(['ASTERIX','Hyracks',null]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_1/issue412_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_1/issue412_1.3.query.sqlpp
index 8ffe5f1,0000000..cf9a3ef
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_1/issue412_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue412_1/issue412_1.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- {'sql-count':"sql-count"([1,60,null]),'average':"sql-avg"([1,60,null]),'sql-sum':"sql-sum"([1,60,null]),'sql-min':"sql-min"([1,60,null]),'sql-max':"sql-max"([1,60,null])};
++{'sql-count':"coll_sql-count"([1,60,null]),'average':"coll_sql-avg"([1,60,null]),'sql-sum':"coll_sql-sum"([1,60,null]),'sql-min':"coll_sql-min"([1,60,null]),'sql-max':"coll_sql-max"([1,60,null])};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
index 1d0f51f,0000000..7df2200
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list/issue425_min_hetero_list.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- "sql-min"([23,748374857483]);
++"coll_sql-min"([23,748374857483]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
index 8d6c63a,0000000..d4f04f0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_min_hetero_list_1/issue425_min_hetero_list_1.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- "sql-min"([748374857483,23,0.5]);
++"coll_sql-min"([748374857483,23,0.5]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
index a05abfe,0000000..11fd369
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list/issue425_sum_hetero_list.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- "sql-sum"([23,748374857483]);
++"coll_sql-sum"([23,748374857483]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
index 0435afa,0000000..6613ea0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue425_sum_hetero_list_1/issue425_sum_hetero_list_1.3.query.sqlpp
@@@ -1,20 -1,0 +1,20 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- "sql-sum"([748374857483,23,0.5]);
++"coll_sql-sum"([748374857483,23,0.5]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.1.ddl.sqlpp
index d393206,0000000..2e8f728
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.1.ddl.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
-  * issue531_string_sql-min_sql-max
++ * issue531_string_coll_sql-min_coll_sql-max
 + *
-  * Purpose: test the support of string values for sql-min and sql-max aggregation function
++ * Purpose: test the support of string values for coll_sql-min and coll_sql-max aggregation function
 + * Result: success
 + *
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 +{
 +  id : int64,
 +  name : string
 +}
 +
 +create  table t1(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.2.update.sqlpp
index da2154f,0000000..506302f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.2.update.sqlpp
@@@ -1,45 -1,0 +1,45 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
-  * issue531_string_sql-min_sql-max
++ * issue531_string_coll_sql-min_coll_sql-max
 + *
-  * Purpose: test the support of string values for sql-min and sql-max aggregation function
++ * Purpose: test the support of string values for coll_sql-min and coll_sql-max aggregation function
 + * Result: success
 + *
 + */
 +
 +use test;
 +
 +
 +insert into t1
 +select element {'id':5,'name':'Smith'};
 +insert into t1
 +select element {'id':12,'name':'Roger'};
 +insert into t1
 +select element {'id':67,'name':'Kevin'};
 +insert into t1
 +select element {'id':32,'name':'Bob'};
 +insert into t1
 +select element {'id':89,'name':'John'};
 +insert into t1
 +select element {'id':10,'name':'Alex'};
 +insert into t1
 +select element {'id':37,'name':'Calvin'};
 +insert into t1
 +select element {'id':98,'name':'Susan'};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
index 89d205a,0000000..abdbb1a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/issue531_string_min_max/issue531_string_min_max.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/**
-  * issue531_string_sql-min_sql-max
++ * issue531_string_coll_sql-min_coll_sql-max
 + *
-  * Purpose: test the support of string values for sql-min and sql-max aggregation function
++ * Purpose: test the support of string values for coll_sql-min and coll_sql-max aggregation function
 + * Result: success
 + *
 + */
 +
 +use test;
 +
 +
- select element {'sql-min':test."sql-min"((
++select element {'sql-min':test."coll_sql-min"((
 +        select element l.name
 +        from  t1 as l
-     )),'sql-max':test."sql-max"((
++    )),'sql-max':test."coll_sql-max"((
 +        select element l.name
 +        from  t1 as l
 +    ))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.1.ddl.sqlpp
index d497b33,0000000..461e0df
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.1.ddl.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-max aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-max aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.2.update.sqlpp
index 7ed3258,0000000..e353b90
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-max aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-max aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.3.query.sqlpp
index a11559d,0000000..e60b446
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_01/max_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-max aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-max aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-max"((
++select element test."coll_sql-max"((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.1.ddl.sqlpp
index 98ceb13,0000000..01f9aa8
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.1.ddl.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-max aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-max aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 + closed {
 +  id : int64,
 +  val : double
 +}
 +
 +create  table Test(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.2.update.sqlpp
index 2599da6,0000000..405f886
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-max aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-max aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.3.query.sqlpp
index 0d1bd13,0000000..03f437e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/max_empty_02/max_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-max aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-max aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-max"((
++select element test."coll_sql-max"((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.1.ddl.sqlpp
index 8574115,0000000..fab2dc7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.1.ddl.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-min aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-min aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.2.update.sqlpp
index 0313677,0000000..959dacd
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-min aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-min aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.3.query.sqlpp
index d02d66e,0000000..11ee5b3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_01/min_empty_01.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-min aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-min aggregation correctly returns null for an empty stream,
 + *                  without an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-min"((
++select element test."coll_sql-min"((
 +    select element x
 +    from  [1,2,3] as x
 +    where (x > 10)
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.1.ddl.sqlpp
index 7ff9d0b,0000000..af822f7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.1.ddl.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-min aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-min aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +
 +use test;
 +
 +
 +create type test.TestType as
 + closed {
 +  id : int64,
 +  val : double
 +}
 +
 +create  table Test(TestType) primary key id;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.2.update.sqlpp
index 2c8ed44,0000000..2edf292
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.2.update.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-min aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-min aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.3.query.sqlpp
index 965dd90,0000000..48a033d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_empty_02/min_empty_02.3.query.sqlpp
@@@ -1,31 -1,0 +1,31 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests that sql-min aggregation correctly returns null for an empty stream,
++ * Description    : Tests that coll_sql-min aggregation correctly returns null for an empty stream,
 + *                  with an aggregate combiner.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-min"((
++select element test."coll_sql-min"((
 +    select element x.val
 +    from  Test as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.1.ddl.sqlpp
index 21781b6,0000000..1c1a445
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.1.ddl.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
- * Description  : Run sql-min over an ordered list with mixed types
++* Description  : Run coll_sql-min over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Feb 7th 2014
 +*/
 +



[09/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppVariableUtil.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppVariableUtil.java
index 8a63aa5,0000000..59e9389
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppVariableUtil.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/SqlppVariableUtil.java
@@@ -1,47 -1,0 +1,131 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.sqlpp.util;
 +
++import java.util.Collection;
++import java.util.HashSet;
++import java.util.List;
++import java.util.Set;
++
++import org.apache.asterix.common.exceptions.AsterixException;
++import org.apache.asterix.lang.common.base.ILangExpression;
++import org.apache.asterix.lang.common.clause.GroupbyClause;
++import org.apache.asterix.lang.common.clause.LetClause;
++import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
++import org.apache.asterix.lang.common.expression.VariableExpr;
 +import org.apache.asterix.lang.common.struct.VarIdentifier;
++import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
++import org.apache.asterix.lang.sqlpp.clause.FromClause;
++import org.apache.asterix.lang.sqlpp.clause.FromTerm;
++import org.apache.asterix.lang.sqlpp.visitor.FreeVariableVisitor;
 +
 +public class SqlppVariableUtil {
 +
 +    private static String USER_VAR_PREFIX = "$";
 +
 +    public static VarIdentifier toUserDefinedVariableName(VarIdentifier var) {
 +        String varName = var.getValue();
 +        return toUserDefinedVariableName(varName);
 +    }
 +
 +    public static VarIdentifier toUserDefinedVariableName(String varName) {
 +        if (varName.startsWith(USER_VAR_PREFIX)) {
 +            return new VarIdentifier(varName.substring(1));
 +        }
 +        return new VarIdentifier(varName);
 +    }
 +
 +    public static String toInternalVariableName(String varName) {
 +        return USER_VAR_PREFIX + varName;
 +    }
 +
 +    public static VarIdentifier toInternalVariableIdentifier(String idName) {
 +        return new VarIdentifier(USER_VAR_PREFIX + idName);
 +    }
 +
++    public static Collection<VariableExpr> getFreeVariables(ILangExpression langExpr) throws AsterixException {
++        Collection<VariableExpr> freeVars = new HashSet<>();
++        FreeVariableVisitor visitor = new FreeVariableVisitor();
++        langExpr.accept(visitor, freeVars);
++        return freeVars;
++    }
++
++    public static Collection<VariableExpr> getBindingVariables(FromClause fromClause) {
++        Set<VariableExpr> bindingVars = new HashSet<>();
++        if (fromClause == null) {
++            return bindingVars;
++        }
++        for (FromTerm fromTerm : fromClause.getFromTerms()) {
++            bindingVars.addAll(getBindingVariables(fromTerm));
++        }
++        return bindingVars;
++    }
++
++    public static Collection<VariableExpr> getBindingVariables(FromTerm fromTerm) {
++        Set<VariableExpr> bindingVars = new HashSet<>();
++        if (fromTerm == null) {
++            return bindingVars;
++        }
++        bindingVars.add(fromTerm.getLeftVariable());
++        if (fromTerm.hasPositionalVariable()) {
++            bindingVars.add(fromTerm.getPositionalVariable());
++        }
++        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
++            bindingVars.add(correlateClause.getRightVariable());
++            if (correlateClause.hasPositionalVariable()) {
++                bindingVars.add(correlateClause.getPositionalVariable());
++            }
++        }
++        return bindingVars;
++    }
++
++    public static Collection<VariableExpr> getBindingVariables(GroupbyClause gbyClause) {
++        Set<VariableExpr> bindingVars = new HashSet<>();
++        if (gbyClause == null) {
++            return bindingVars;
++        }
++        for (GbyVariableExpressionPair gbyKey : gbyClause.getGbyPairList()) {
++            VariableExpr var = gbyKey.getVar();
++            if (var != null) {
++                bindingVars.add(var);
++            }
++        }
++        for (GbyVariableExpressionPair gbyKey : gbyClause.getDecorPairList()) {
++            VariableExpr var = gbyKey.getVar();
++            if (var != null) {
++                bindingVars.add(var);
++            }
++        }
++        bindingVars.addAll(gbyClause.getWithVarList());
++        bindingVars.add(gbyClause.getGroupVar());
++        return bindingVars;
++    }
++
++    public static Collection<VariableExpr> getBindingVariables(List<LetClause> letClauses) {
++        Set<VariableExpr> bindingVars = new HashSet<>();
++        if (letClauses == null || letClauses.isEmpty()) {
++            return bindingVars;
++        }
++        for (LetClause letClause : letClauses) {
++            bindingVars.add(letClause.getVarExpr());
++        }
++        return bindingVars;
++    }
++
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java
index df32b01,0000000..0f36646
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppAstPrintVisitor.java
@@@ -1,271 -1,0 +1,295 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.sqlpp.visitor;
 +
 +import java.io.PrintWriter;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
++import org.apache.asterix.common.functions.FunctionSignature;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.clause.GroupbyClause;
 +import org.apache.asterix.lang.common.clause.LetClause;
++import org.apache.asterix.lang.common.expression.CallExpr;
 +import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
 +import org.apache.asterix.lang.common.struct.Identifier;
 +import org.apache.asterix.lang.common.visitor.QueryPrintVisitor;
 +import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
 +import org.apache.asterix.lang.sqlpp.clause.FromClause;
 +import org.apache.asterix.lang.sqlpp.clause.FromTerm;
 +import org.apache.asterix.lang.sqlpp.clause.HavingClause;
 +import org.apache.asterix.lang.sqlpp.clause.JoinClause;
 +import org.apache.asterix.lang.sqlpp.clause.NestClause;
 +import org.apache.asterix.lang.sqlpp.clause.Projection;
 +import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
 +import org.apache.asterix.lang.sqlpp.clause.SelectClause;
 +import org.apache.asterix.lang.sqlpp.clause.SelectElement;
 +import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
 +import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
 +import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
 +import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
 +import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
++import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
 +import org.apache.asterix.lang.sqlpp.visitor.base.ISqlppVisitor;
++import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
 +import org.apache.hyracks.algebricks.common.utils.Pair;
 +
 +public class SqlppAstPrintVisitor extends QueryPrintVisitor implements ISqlppVisitor<Void, Integer> {
 +
 +    private final PrintWriter out;
 +
 +    public SqlppAstPrintVisitor() {
 +        super();
 +        out = new PrintWriter(System.out);
 +    }
 +
 +    public SqlppAstPrintVisitor(PrintWriter out) {
 +        super(out);
 +        this.out = out;
 +    }
 +
 +    @Override
 +    public Void visit(FromClause fromClause, Integer step) throws AsterixException {
 +        out.print(skip(step) + "FROM [");
 +        int index = 0;
 +        for (FromTerm fromTerm : fromClause.getFromTerms()) {
 +            if (index > 0) {
 +                out.println(",");
 +            }
 +            fromTerm.accept(this, step + 1);
 +            ++index;
 +        }
 +        out.println(skip(step) + "]");
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(FromTerm fromTerm, Integer step) throws AsterixException {
 +        fromTerm.getLeftExpression().accept(this, step);
 +        out.println(skip(step) + "AS");
 +        fromTerm.getLeftVariable().accept(this, step);
 +        if (fromTerm.hasPositionalVariable()) {
 +            out.println(skip(step) + "AT");
 +            fromTerm.getPositionalVariable().accept(this, step);
 +        }
 +        if (fromTerm.hasCorrelateClauses()) {
 +            for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
 +                correlateClause.accept(this, step);
 +            }
 +        }
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(JoinClause joinClause, Integer step) throws AsterixException {
 +        out.println(skip(step) + joinClause.getJoinType() + " JOIN");
 +        joinClause.getRightExpression().accept(this, step + 1);
 +        out.println(skip(step + 1) + "AS");
 +        joinClause.getRightVariable().accept(this, step + 1);
 +        if (joinClause.hasPositionalVariable()) {
 +            out.println(skip(step + 1) + "AT");
 +            joinClause.getPositionalVariable().accept(this, step + 1);
 +        }
 +        out.println(skip(step + 1) + "ON");
 +        joinClause.getConditionExpression().accept(this, step + 1);
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(NestClause nestClause, Integer step) throws AsterixException {
 +        out.println(skip(step) + nestClause.getJoinType() + " NEST");
 +        nestClause.getRightExpression().accept(this, step + 1);
 +        out.println(skip(step + 1) + "AS");
 +        nestClause.getRightVariable().accept(this, step + 1);
 +        if (nestClause.hasPositionalVariable()) {
 +            out.println(skip(step + 1) + "AT");
 +            nestClause.getPositionalVariable().accept(this, step + 1);
 +        }
 +        out.println(skip(step + 1) + "ON");
 +        nestClause.getConditionExpression().accept(this, step + 1);
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(Projection projection, Integer step) throws AsterixException {
 +        projection.getExpression().accept(this, step);
 +        out.println(skip(step) + projection.getName());
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(SelectBlock selectBlock, Integer step) throws AsterixException {
 +        selectBlock.getSelectClause().accept(this, step);
 +        if (selectBlock.hasFromClause()) {
 +            selectBlock.getFromClause().accept(this, step);
 +        }
 +        if (selectBlock.hasLetClauses()) {
 +            for (LetClause letClause : selectBlock.getLetList()) {
 +                letClause.accept(this, step);
 +            }
 +        }
 +        if (selectBlock.hasWhereClause()) {
 +            selectBlock.getWhereClause().accept(this, step);
 +        }
 +        if (selectBlock.hasGroupbyClause()) {
 +            selectBlock.getGroupbyClause().accept(this, step);
 +            if (selectBlock.hasLetClausesAfterGroupby()) {
 +                for (LetClause letClause : selectBlock.getLetListAfterGroupby()) {
 +                    letClause.accept(this, step);
 +                }
 +            }
 +        }
 +        if (selectBlock.hasHavingClause()) {
 +            selectBlock.getHavingClause().accept(this, step);
 +        }
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(SelectClause selectClause, Integer step) throws AsterixException {
 +        if (selectClause.selectRegular()) {
 +            selectClause.getSelectRegular().accept(this, step);
 +        }
 +        if (selectClause.selectElement()) {
 +            selectClause.getSelectElement().accept(this, step);
 +        }
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(SelectElement selectElement, Integer step) throws AsterixException {
 +        out.println(skip(step) + "SELECT ELEMENT [");
 +        selectElement.getExpression().accept(this, step);
 +        out.println(skip(step) + "]");
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(SelectRegular selectRegular, Integer step) throws AsterixException {
 +        out.println(skip(step) + "SELECT [");
 +        for (Projection projection : selectRegular.getProjections()) {
 +            projection.accept(this, step);
 +        }
 +        out.println(skip(step) + "]");
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(SelectSetOperation selectSetOperation, Integer step) throws AsterixException {
 +        selectSetOperation.getLeftInput().accept(this, step);
 +        if (selectSetOperation.hasRightInputs()) {
 +            for (SetOperationRight right : selectSetOperation.getRightInputs()) {
 +                String all = right.isSetSemantics() ? " ALL " : "";
 +                out.println(skip(step) + right.getSetOpType() + all);
 +                right.getSetOperationRightInput().accept(this, step + 1);
 +            }
 +        }
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(SelectExpression selectStatement, Integer step) throws AsterixException {
 +        if (selectStatement.isSubquery()) {
 +            out.println(skip(step) + "(");
 +        }
 +        int selectStep = selectStatement.isSubquery() ? step + 1 : step;
 +        if (selectStatement.hasLetClauses()) {
 +            for (LetClause letClause : selectStatement.getLetList()) {
 +                letClause.accept(this, selectStep);
 +            }
 +        }
 +        selectStatement.getSelectSetOperation().accept(this, selectStep);
 +        if (selectStatement.hasOrderby()) {
 +            selectStatement.getOrderbyClause().accept(this, selectStep);
 +        }
 +        if (selectStatement.hasLimit()) {
 +            selectStatement.getLimitClause().accept(this, selectStep);
 +        }
 +        if (selectStatement.isSubquery()) {
 +            out.println(skip(step) + ")");
 +        }
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(UnnestClause unnestClause, Integer step) throws AsterixException {
 +        out.println(skip(step) + unnestClause.getJoinType() + " UNNEST");
 +        unnestClause.getRightExpression().accept(this, step + 1);
 +        out.println(skip(step + 1) + " AS");
 +        unnestClause.getRightVariable().accept(this, step + 1);
 +        if (unnestClause.hasPositionalVariable()) {
 +            out.println(skip(step + 1) + " AT");
 +            unnestClause.getPositionalVariable().accept(this, step + 1);
 +        }
 +        return null;
 +    }
 +
 +    @Override
 +    public Void visit(HavingClause havingClause, Integer step) throws AsterixException {
 +        out.println(skip(step) + " HAVING");
 +        havingClause.getFilterExpression().accept(this, step + 1);
 +        return null;
 +    }
 +
 +    @Override
++    public Void visit(CallExpr pf, Integer step) throws AsterixException {
++        FunctionSignature functionSignature = pf.getFunctionSignature();
++        FunctionSignature normalizedFunctionSignature = FunctionMapUtil
++                .normalizeBuiltinFunctionSignature(functionSignature, false);
++        if (AsterixBuiltinFunctions.isBuiltinCompilerFunction(normalizedFunctionSignature, true)) {
++            functionSignature = normalizedFunctionSignature;
++        }
++        out.println(skip(step) + "FunctionCall " + functionSignature.toString() + "[");
++        for (Expression expr : pf.getExprList()) {
++            expr.accept(this, step + 1);
++        }
++        out.println(skip(step) + "]");
++        return null;
++    }
++
++    @Override
 +    public Void visit(GroupbyClause gc, Integer step) throws AsterixException {
++        if (gc.isGroupAll()) {
++            out.println(skip(step) + "Group All");
++            return null;
++        }
 +        out.println(skip(step) + "Groupby");
 +        for (GbyVariableExpressionPair pair : gc.getGbyPairList()) {
 +            if (pair.getVar() != null) {
 +                pair.getVar().accept(this, step + 1);
 +                out.println(skip(step + 1) + ":=");
 +            }
 +            pair.getExpr().accept(this, step + 1);
 +        }
 +        if (gc.hasGroupVar()) {
 +            out.println(skip(step + 1) + "GROUP AS");
 +            gc.getGroupVar().accept(this, step + 1);
 +            if (gc.hasGroupFieldList()) {
 +                out.println(skip(step + 1) + "(");
 +                for (Pair<Expression, Identifier> field : gc.getGroupFieldList()) {
 +                    field.first.accept(this, step + 1);
 +                    out.println(skip(step + 1) + " AS " + field.second);
 +                }
 +                out.println(skip(step + 1) + ")");
 +            }
 +        }
 +        out.println();
 +        return null;
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java
index 5a15772,0000000..efff18e
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppDeleteRewriteVisitor.java
@@@ -1,95 -1,0 +1,95 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.sqlpp.visitor;
 +
 +import java.util.ArrayList;
 +import java.util.List;
 +
 +import org.apache.asterix.common.functions.FunctionConstants;
 +import org.apache.asterix.common.functions.FunctionSignature;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.clause.WhereClause;
 +import org.apache.asterix.lang.common.expression.CallExpr;
 +import org.apache.asterix.lang.common.expression.LiteralExpr;
 +import org.apache.asterix.lang.common.expression.VariableExpr;
 +import org.apache.asterix.lang.common.literal.StringLiteral;
 +import org.apache.asterix.lang.common.statement.DeleteStatement;
 +import org.apache.asterix.lang.common.statement.Query;
 +import org.apache.asterix.lang.common.struct.Identifier;
 +import org.apache.asterix.lang.sqlpp.clause.FromClause;
 +import org.apache.asterix.lang.sqlpp.clause.FromTerm;
 +import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
 +import org.apache.asterix.lang.sqlpp.clause.SelectClause;
 +import org.apache.asterix.lang.sqlpp.clause.SelectElement;
 +import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
 +import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
 +import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
 +import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppAstVisitor;
 +import org.mortbay.util.SingletonList;
 +
 +/**
 + * This class rewrites delete statement to contain a query that specifying
 + * what to delete.
 + */
 +public class SqlppDeleteRewriteVisitor extends AbstractSqlppAstVisitor<Void, Void> {
 +
 +    @Override
 +    public Void visit(DeleteStatement deleteStmt, Void visitArg) {
 +        List<Expression> arguments = new ArrayList<Expression>();
 +        Identifier dataverseName = deleteStmt.getDataverseName();
 +        Identifier datasetName = deleteStmt.getDatasetName();
 +        String arg = dataverseName == null ? datasetName.getValue()
 +                : dataverseName.getValue() + "." + datasetName.getValue();
 +        LiteralExpr argumentLiteral = new LiteralExpr(new StringLiteral(arg));
 +        arguments.add(argumentLiteral);
 +        CallExpr callExpression = new CallExpr(new FunctionSignature(FunctionConstants.ASTERIX_NS, "dataset", 1),
 +                arguments);
 +
 +        // From clause.
 +        VariableExpr var = deleteStmt.getVariableExpr();
 +        FromTerm fromTerm = new FromTerm(callExpression, var, null, null);
 +        @SuppressWarnings("unchecked")
 +        FromClause fromClause = new FromClause(SingletonList.newSingletonList(fromTerm));
 +
 +        // Where clause.
 +        WhereClause whereClause = null;
 +        Expression condition = deleteStmt.getCondition();
 +        if (condition != null) {
 +            whereClause = new WhereClause(condition);
 +        }
 +
 +        // Select clause.
 +        VariableExpr returnExpr = new VariableExpr(var.getVar());
 +        returnExpr.setIsNewVar(false);
 +        SelectElement selectElement = new SelectElement(returnExpr);
 +        SelectClause selectClause = new SelectClause(selectElement, null, false);
 +
 +        // Construct the select expression.
 +        SelectBlock selectBlock = new SelectBlock(selectClause, fromClause, null, whereClause, null, null, null);
 +        SelectSetOperation selectSetOperation = new SelectSetOperation(new SetOperationInput(selectBlock, null), null);
 +        SelectExpression selectExpression = new SelectExpression(null, selectSetOperation, null, null, false);
-         Query query = new Query();
++        Query query = new Query(false, selectExpression, 0, new ArrayList<>(), new ArrayList<>());
 +        query.setBody(selectExpression);
 +
 +        // return the delete statement.
 +        deleteStmt.setQuery(query);
 +        return null;
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSubstituteVariablesVisitor.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSubstituteVariablesVisitor.java
index a9aff55,0000000..f737eb7
mode 100644,000000..100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSubstituteVariablesVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/SqlppSubstituteVariablesVisitor.java
@@@ -1,45 -1,0 +1,48 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.lang.sqlpp.visitor;
 +
++import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.lang.common.base.Expression;
 +import org.apache.asterix.lang.common.expression.VariableExpr;
 +import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
 +import org.apache.asterix.lang.common.rewrites.VariableSubstitutionEnvironment;
++import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
 +
 +public class SqlppSubstituteVariablesVisitor extends SqlppCloneAndSubstituteVariablesVisitor {
 +
 +    public SqlppSubstituteVariablesVisitor() {
 +        super(null);
 +    }
 +
 +    @Override
-     protected Expression rewriteVariableExpr(VariableExpr expr, VariableSubstitutionEnvironment env) {
++    protected Expression rewriteVariableExpr(VariableExpr expr, VariableSubstitutionEnvironment env)
++            throws AsterixException {
 +        if (env.constainsOldVar(expr)) {
-             return env.findSubstituion(expr);
++            return (Expression) SqlppRewriteUtil.deepCopy(env.findSubstituion(expr));
 +        }
 +        return expr;
 +    }
 +
 +    @Override
 +    public VariableExpr generateNewVariable(LangRewritingContext context, VariableExpr varExpr) {
 +        return varExpr;
 +    }
 +
 +}


[26/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
index a8f29d3,0000000..a376e45
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element (100.0 * tpch.sum((
++select element (100.0 * tpch.coll_sum((
 +      select element tpch."switch-case"(tpch.like(i.p_type,'PROMO%'),true,(i.l_extendedprice * (1 - i.l_discount)),false,0.0)
 +      from  lp as i
-   )) / tpch.sum((
++  )) / tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  lp as i
 +  )))
 +from  LineItem as l,
 +      Part as p
 +let lp = {'p_type': p.p_type, 'l_extendedprice': l.l_extendedprice, 'l_discount': l.l_discount}
 +where ((l.l_partkey = p.p_partkey) and (l.l_shipdate >= '1995-09-01') and (l.l_shipdate < '1995-10-01'))
 +group by 1 as t
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q15_top_supplier/q15_top_supplier.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q15_top_supplier/q15_top_supplier.3.query.sqlpp
index e21104b,0000000..fdc4fcc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q15_top_supplier/q15_top_supplier.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q15_top_supplier/q15_top_supplier.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function revenue() {
 +(
-     select element {'supplier_no':l_suppkey,'total_revenue':tpch.sum((
++    select element {'supplier_no':l_suppkey,'total_revenue':tpch.coll_sum((
 +            select element (i.l_extendedprice * (1 - i.l_discount))
 +            from  l as i
 +        ))}
 +    from  LineItem as l
 +    where ((l.l_shipdate >= '1996-01-01') and (l.l_shipdate < '1996-04-01'))
 +    group by l.l_suppkey as l_suppkey
 +)
 +};
- with  m as tpch.max((
++with  m as tpch.coll_max((
 +      select element r2.total_revenue
 +      from  tpch.revenue() as r2
 +  ))
 +select element {'s_suppkey':s.s_suppkey,'s_name':s.s_name,'s_address':s.s_address,'s_phone':s.s_phone,'total_revenue':r.total_revenue}
 +from  Supplier as s,
 +      tpch.revenue() as r
 +where ((s.s_suppkey = r.supplier_no) and (r.total_revenue < (m + 0.000000001)) and (r.total_revenue > (m - 0.000000001)))
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
index cc4d01a,0000000..acf3281
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
@@@ -1,49 -1,0 +1,49 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp() {
 +(
 +    select element {'p_brand':psp.p_brand,'p_type':psp.p_type,'p_size':psp.p_size,'ps_suppkey':psp.ps_suppkey}
 +    from  (
 +        select element {'p_brand':p.p_brand,'p_type':p.p_type,'p_size':p.p_size,'ps_suppkey':ps.ps_suppkey}
 +        from  Partsupp as ps,
 +              Part as p
 +        where ((p.p_partkey = ps.ps_partkey) and (p.p_brand != 'Brand#45') and tpch.not(tpch.like(p.p_type,'MEDIUM POLISHED%')))
 +    ) as psp,
 +          Supplier as s
 +    where ((psp.ps_suppkey = s.s_suppkey) and tpch.not(tpch.like(s.s_comment,'%Customer%Complaints%')))
 +)
 +};
 +select element {'p_brand':p_brand,'p_type':p_type,'p_size':p_size,'supplier_cnt':supplier_cnt}
 +from  (
 +    select element {'p_brand':p_brand1,'p_type':p_type1,'p_size':p_size1,'ps_suppkey':ps_suppkey1}
 +    from  tpch.tmp() as t
 +    where ((t.p_size = 49) or (t.p_size = 14) or (t.p_size = 23) or (t.p_size = 45) or (t.p_size = 19) or (t.p_size = 3) or (t.p_size = 36) or (t.p_size = 9))
 +    group by t.p_brand as p_brand1,t.p_type as p_type1,t.p_size as p_size1,t.ps_suppkey as ps_suppkey1
 +) as t2
 +group by t2.p_brand as p_brand,t2.p_type as p_type,t2.p_size as p_size
- with  supplier_cnt as tpch.count((
++with  supplier_cnt as coll_count((
 +      select element i.ps_suppkey
 +      from  t2 as i
 +  ))
 +order by supplier_cnt desc,p_brand,p_type,p_size
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
index ada4f75,0000000..2e057d7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'t_partkey':l_partkey,'t_count':tpch.count(l),'t_avg_quantity':(0.2 * tpch.avg((
++select element {'t_partkey':l_partkey,'t_count':tpch.count(l),'t_avg_quantity':(0.2 * tpch.coll_avg((
 +          select element i.l_quantity
 +          from  l as i
-       ))),'t_max_suppkey':tpch.max((
++      ))),'t_max_suppkey':tpch.coll_max((
 +        select element i.l_suppkey
 +        from  l as i
-     )),'t_max_linenumber':tpch.max((
++    )),'t_max_linenumber':tpch.coll_max((
 +        select element i.l_linenumber
 +        from  l as i
-     )),'t_avg_extendedprice':tpch.avg((
++    )),'t_avg_extendedprice':tpch.coll_avg((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'t_avg_discount':tpch.avg((
++    )),'t_avg_discount':tpch.coll_avg((
 +        select element i.l_discount
 +        from  l as i
-     )),'t_avg_tax':tpch.avg((
++    )),'t_avg_tax':tpch.coll_avg((
 +        select element i.l_tax
 +        from  l as i
-     )),'t_max_shipdate':tpch.max((
++    )),'t_max_shipdate':tpch.coll_max((
 +        select element i.l_shipdate
 +        from  l as i
-     )),'t_min_commitdate':tpch.min((
++    )),'t_min_commitdate':tpch.coll_min((
 +        select element i.l_commitdate
 +        from  l as i
-     )),'t_min_receiptdate':tpch.min((
++    )),'t_min_receiptdate':tpch.coll_min((
 +        select element i.l_receiptdate
 +        from  l as i
-     )),'t_max_comment':tpch.max((
++    )),'t_max_comment':tpch.coll_max((
 +        select element i.l_comment
 +        from  l as i
 +    ))}
 +from  LineItem as l
 +group by l.l_partkey as l_partkey
 +order by l_partkey
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
index 5aa971e,0000000..2f48cf3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp() {
 +(
-     select element {'t_partkey':l_partkey,'t_avg_quantity':(0.2 * tpch.avg((
++    select element {'t_partkey':l_partkey,'t_avg_quantity':(0.2 * COLL_AVG((
 +              select element i.l_quantity
 +              from  l as i
 +          )))}
 +    from  LineItem as l
 +    group by l.l_partkey as l_partkey
 +)
 +};
 +
- select element (tpch.sum((
++select element (COLL_SUM((
 +      select element l.l_extendedprice
 +      from  LineItem as l,
 +            Part as p,
 +            tpch.tmp() as t
 +      where (((p.p_partkey = l.l_partkey) and (p.p_container = 'MED BOX')) and ((l.l_partkey = t.t_partkey) and (l.l_quantity < t.t_avg_quantity)))
 +  )) / 7.0);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
index 15081ff,0000000..5215013
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':tpch.sum((
++select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':COLL_SUM((
 +        select element j.l_quantity
 +        from  l as j
 +    ))}
 +from  Customer as c,
 +      Orders as o,
 +      (
-     select element {'l_orderkey':l_orderkey,'t_sum_quantity':tpch.sum((
++    select element {'l_orderkey':l_orderkey,'t_sum_quantity':COLL_SUM((
 +            select element i.l_quantity
 +            from  l as i
 +        ))}
 +    from  LineItem as l
 +    group by l.l_orderkey as l_orderkey
 +) as t,
 +      LineItem as l
 +where ((c.c_custkey = o.o_custkey) and ((o.o_orderkey = t.l_orderkey) and (t.t_sum_quantity > 30)) and (l.l_orderkey = o.o_orderkey))
 +group by c.c_name as c_name,c.c_custkey as c_custkey,o.o_orderkey as o_orderkey,o.o_orderdate as o_orderdate,o.o_totalprice as o_totalprice
 +order by o_totalprice desc,o_orderdate
 +limit 100
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
index 7065f87,0000000..f245189
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
- select element tpch.sum((
++select element tpch.coll_sum((
 +    select element (l.l_extendedprice * (1 - l.l_discount))
 +    from  LineItem as l,
 +          Part as p
 +    where ((p.p_partkey = l.l_partkey) and (((p.p_brand = 'Brand#12') and tpch."reg-exp"(p.p_container,'SM CASE||SM BOX||SM PACK||SM PKG') and (l.l_quantity >= 1) and (l.l_quantity <= 11) and (p.p_size >= 1) and (p.p_size <= 5) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON')) or ((p.p_brand = 'Brand#23') and tpch."reg-exp"(p.p_container,'MED BAG||MED BOX||MED PKG||MED PACK') and (l.l_quantity >= 10) and (l.l_quantity <= 20) and (p.p_size >= 1) and (p.p_size <= 10) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON')) or ((p.p_brand = 'Brand#34') and tpch."reg-exp"(p.p_container,'LG CASE||LG BOX||LG PACK||LG PKG') and (l.l_quantity >= 20) and (l.l_quantity <= 30) and (p.p_size >= 1) and (p.p_size <= 15) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON'))))
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
index 0657ad3,0000000..82e38bf
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
@@@ -1,53 -1,0 +1,53 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'s_name':t4.s_name,'s_address':t4.s_address}
 +from  (
 +    select distinct element {'ps_suppkey':pst1.ps_suppkey}
 +    from  (
-         select element {'l_partkey':l_partkey,'l_suppkey':l_suppkey,'sum_quantity':(0.5 * tpch.sum((
++        select element {'l_partkey':l_partkey,'l_suppkey':l_suppkey,'sum_quantity':(0.5 * tpch.coll_sum((
 +                  select element i.l_quantity
 +                  from  l as i
 +              )))}
 +        from  LineItem as l
 +        group by l.l_partkey as l_partkey,l.l_suppkey as l_suppkey
 +    ) as t2,
 +          (
 +        select element {'ps_suppkey':ps.ps_suppkey,'ps_partkey':ps.ps_partkey,'ps_availqty':ps.ps_availqty}
 +        from  Partsupp as ps,
 +              (
 +            select distinct element {'p_partkey':p.p_partkey}
 +            from  Part as p
 +        ) as t1
 +        where (ps.ps_partkey = t1.p_partkey)
 +    ) as pst1
 +    where ((pst1.ps_partkey = t2.l_partkey) and (pst1.ps_suppkey = t2.l_suppkey) and (pst1.ps_availqty > t2.sum_quantity))
 +) as t3,
 +      (
 +    select element {'s_name':s.s_name,'s_address':s.s_address,'s_suppkey':s.s_suppkey}
 +    from  Nation as n,
 +          Supplier as s
 +    where (s.s_nationkey = n.n_nationkey)
 +) as t4
 +where (t3.ps_suppkey = t4.s_suppkey)
 +order by t4.s_name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
index 5382019,0000000..578d72e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
@@@ -1,80 -1,0 +1,80 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp1() {
 +(
-     select element {'l_orderkey':l_orderkey,'count_suppkey':tpch.count((
++    select element {'l_orderkey':l_orderkey,'count_suppkey':COLL_COUNT((
 +            select element i.l_suppkey
 +            from  l2 as i
-         )),'max_suppkey':tpch.max((
++        )),'max_suppkey':tpch.coll_max((
 +            select element i.l_suppkey
 +            from  l2 as i
 +        ))}
 +    from  (
 +        select element {'l_orderkey':l_orderkey1,'l_suppkey':l_suppkey1}
 +        from  LineItem as l
 +        group by l.l_orderkey as l_orderkey1,l.l_suppkey as l_suppkey1
 +    ) as l2
 +    group by l2.l_orderkey as l_orderkey
 +)
 +};
 +declare function tmp2() {
 +(
-     select element {'l_orderkey':l_orderkey,'count_suppkey':tpch.count((
++    select element {'l_orderkey':l_orderkey,'count_suppkey':COLL_COUNT((
 +            select element i.l_suppkey
 +            from  l2 as i
-         )),'max_suppkey':tpch.max((
++        )),'max_suppkey':COLL_MAX((
 +            select element i.l_suppkey
 +            from  l2 as i
 +        ))}
 +    from  (
 +        select element {'l_orderkey':l_orderkey1,'l_suppkey':l_suppkey1}
 +        from  LineItem as l
 +        where (l.l_receiptdate > l.l_commitdate)
 +        group by l.l_orderkey as l_orderkey1,l.l_suppkey as l_suppkey1
 +    ) as l2
 +    group by l2.l_orderkey as l_orderkey
 +)
 +};
 +select element {'s_name':s_name,'numwait':numwait}
 +from  (
 +    select element {'s_name':t3.s_name,'l_suppkey':t3.l_suppkey,'l_orderkey':t2.l_orderkey,'count_suppkey':t2.count_suppkey,'max_suppkey':t2.max_suppkey}
 +    from  (
 +        select element {'s_name':ns.s_name,'l_orderkey':t1.l_orderkey,'l_suppkey':l.l_suppkey}
 +        from  LineItem as l,
 +              (
 +            select element {'s_name':s.s_name,'s_suppkey':s.s_suppkey}
 +            from  Nation as n,
 +                  Supplier as s
 +            where (s.s_nationkey = n.n_nationkey)
 +        ) as ns,
 +              Orders as o,
 +              tpch.tmp1() as t1
 +        where (((ns.s_suppkey = l.l_suppkey) and (l.l_receiptdate > l.l_commitdate)) and (o.o_orderkey = l.l_orderkey) and (l.l_orderkey = t1.l_orderkey))
 +    ) as t3,
 +          tpch.tmp2() as t2
 +    where ((t2.count_suppkey >= 0) and (t3.l_orderkey = t2.l_orderkey))
 +) as t4
 +group by t4.s_name as s_name
- with  numwait as tpch.count(t4)
++with  numwait as COLL_COUNT(t4)
 +order by numwait desc,s_name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
index 6136008,0000000..328c753
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function q22_customer_tmp() {
 +(
 +    select element {'c_acctbal':c.c_acctbal,'c_custkey':c.c_custkey,'cntrycode':tpch.substring(c.c_phone,1,2)}
 +    from  Customer as c
 +)
 +};
- with  avg as tpch.avg((
++with  avg as tpch.coll_avg((
 +      select element c.c_acctbal
 +      from  Customer as c
 +      where (c.c_acctbal > 0.0)
 +  ))
- select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.sum((
++select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.coll_sum((
 +        select element i.c_acctbal
 +        from  ct as i
 +    ))}
 +from  tpch.q22_customer_tmp() as ct
 +where (ct.c_acctbal > avg)
 +group by ct.cntrycode as cntrycode
 +order by cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue601/query-issue601.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue601/query-issue601.3.query.sqlpp
index 44852ec,0000000..733924e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue601/query-issue601.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue601/query-issue601.3.query.sqlpp
@@@ -1,32 -1,0 +1,32 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix from issue601
 + * https://code.google.com/p/asterixdb/issues/detail?id=601
 + * Expected Res : SUCCESS
 + * Date         : 10th Oct 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'l_linenumber':l_linenumber,'count_order':tpch.count(l)}
++select element {'l_linenumber':l_linenumber,'count_order':COLL_COUNT(l)}
 +from  LineItem as l
 +group by l.l_linenumber as l_linenumber
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue638/query-issue638.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue638/query-issue638.3.query.sqlpp
index d2e8a7e,0000000..78bc3cd
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue638/query-issue638.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue638/query-issue638.3.query.sqlpp
@@@ -1,63 -1,0 +1,63 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix from issue638
 + * https://code.google.com/p/asterixdb/issues/detail?id=638
 + * Expected Res : SUCCESS
 + * Date         : 24th Oct. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.sum((
++select element {'nation':nation,'o_year':o_year,'sum_profit':COLL_SUM((
 +        select element pr.amount
 +        from  profit as pr
 +    ))}
 +from  (
 +    select element {'nation':l3.n_name,'o_year':o_year,'amount':amount}
 +    from  Orders as o,
 +          (
 +        select element {'l_extendedprice':l2.l_extendedprice,'l_discount':l2.l_discount,'l_quantity':l2.l_quantity,'l_orderkey':l2.l_orderkey,'n_name':l2.n_name,'ps_supplycost':l2.ps_supplycost}
 +        from  Part as p,
 +              (
 +            select element {'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'l_quantity':l1.l_quantity,'l_partkey':l1.l_partkey,'l_orderkey':l1.l_orderkey,'n_name':l1.n_name,'ps_supplycost':ps.ps_supplycost}
 +            from  Partsupp as ps,
 +                  (
 +                select element {'l_suppkey':l.l_suppkey,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_quantity':l.l_quantity,'l_partkey':l.l_partkey,'l_orderkey':l.l_orderkey,'n_name':s1.n_name}
 +                from  (
 +                    select element {'s_suppkey':s.s_suppkey,'n_name':n.n_name}
 +                    from  Supplier as s,
 +                          Nation as n
 +                    where (n.n_nationkey = s.s_nationkey)
 +                ) as s1,
 +                      LineItem as l
 +                where (s1.s_suppkey = l.l_suppkey)
 +            ) as l1
 +            where ((ps.ps_suppkey = l1.l_suppkey) and (ps.ps_partkey = l1.l_partkey))
 +        ) as l2
-         where (tpch.contains(p.p_name,'green') and (p.p_partkey = l2.l_partkey))
++        where (CONTAINS(p.p_name,'green') and (p.p_partkey = l2.l_partkey))
 +    ) as l3
 +    with  amount as ((l3.l_extendedprice * (1 - l3.l_discount)) - (l3.ps_supplycost * l3.l_quantity)),
 +          o_year as tpch."get-year"(o.o_orderdate)
 +    where (o.o_orderkey = l3.l_orderkey)
 +) as profit
 +group by profit.nation as nation,profit.o_year as o_year
 +order by nation,o_year desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785-2/query-issue785-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785-2/query-issue785-2.3.query.sqlpp
index 341d824,0000000..0127261
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785-2/query-issue785-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785-2/query-issue785-2.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix from issue785
 + * https://code.google.com/p/asterixdb/issues/detail?id=785
 + * Expected Res : SUCCESS
 + * Date         : 2nd Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +with  t as (
 +      select element {'n_nationkey':nation.n_nationkey,'n_name':nation.n_name}
 +      from  Nation as nation,
 +            SelectedNation as sn
 +      where (nation.n_nationkey = sn.n_nationkey)
 +  ),
 +      X as (
 +      select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':sum}
 +      from  t as n,
 +            Customer as customer,
 +            Orders as orders
 +      where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = n.n_nationkey))
 +      group by orders.o_orderdate as orderdate,n.n_nationkey as nation_key
-       with  sum as tpch.sum((
++      with  sum as COLL_SUM((
 +            select element o.o_totalprice
 +            from  orders as o
 +        ))
 +  )
 +select element {'nation_key':nation_key,'sum_price':(
 +        select element {'orderdate':y.order_date,'sum_price':y.sum_price}
 +        from  x as y
 +        order by y.sum_price desc
 +        limit 3
 +    )}
 +from  X as x
 +group by x.nation_key as nation_key
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785/query-issue785.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785/query-issue785.3.query.sqlpp
index 0f31629,0000000..db98f32
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785/query-issue785.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue785/query-issue785.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix from issue785
 + * https://code.google.com/p/asterixdb/issues/detail?id=785
 + * Expected Res : SUCCESS
 + * Date         : 2nd Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation_key,'sum_price':(
 +        select element {'orderdate':od,'sum_price':sum}
 +        from  x as i
 +        group by i.order_date as od
-         with  sum as tpch.sum((
++        with  sum as COLL_SUM((
 +              select element s.sum_price
 +              from  i as s
 +          ))
 +        order by sum desc
 +        limit 3
 +    )}
 +from  (
-     select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':tpch.sum((
++    select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':COLL_SUM((
 +            select element o.o_totalprice
 +            from  orders as o
 +        ))}
 +    from  Nation as n,
 +          Customer as customer,
 +          Orders as orders
 +    where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = n.n_nationkey))
 +    group by orders.o_orderdate as orderdate,n.n_nationkey as nation_key
 +) as x
 +group by x.nation_key as nation_key
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue786/query-issue786.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue786/query-issue786.3.query.sqlpp
index c35853c,0000000..7098a55
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue786/query-issue786.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql-like/query-issue786/query-issue786.3.query.sqlpp
@@@ -1,45 -1,0 +1,45 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix from issue786
 + * https://code.google.com/p/asterixdb/issues/detail?id=786
 + * Expected Res : SUCCESS
 + * Date         : 10th Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element {'order_date':orderdate,'sum_price':sum}
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as COLL_SUM((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum desc
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey = sn.sn_nationkey)
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
index ba4f3e8,0000000..77ad1b6
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
@@@ -1,34 -1,0 +1,33 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- {'revenue':sum(
++{'revenue':
 +      (
-         SELECT ELEMENT l.l_extendedprice * l.l_discount
++        SELECT ELEMENT SUM(l.l_extendedprice * l.l_discount)
 +        FROM  LineItem AS l
 +        WHERE l.l_shipdate >= '1994-01-01'
 +              and l.l_shipdate < '1995-01-01'
 +              and l.l_discount >= 0.05
 +              and l.l_discount <= 0.07
 +              and l.l_quantity < 24
-       )
-     )
++      )[0]
 +};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q11_important_stock/q11_important_stock.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q11_important_stock/q11_important_stock.3.query.sqlpp
index 0e84e68,0000000..6faf579
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q11_important_stock/q11_important_stock.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q11_important_stock/q11_important_stock.3.query.sqlpp
@@@ -1,54 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- WITH sum as sum(
-      (
-       SELECT ELEMENT ps.ps_supplycost * ps.ps_availqty
++WITH sum as (
++      SELECT ELEMENT SUM(ps.ps_supplycost * ps.ps_availqty)
 +      FROM  Partsupp AS ps,
 +            (
 +                SELECT s.s_suppkey s_suppkey
 +                FROM  Supplier as s,
 +                      Nation as n
 +                WHERE s.s_nationkey = n.n_nationkey
 +            ) AS sn
 +      WHERE ps.ps_suppkey = sn.s_suppkey
-      )
- )
++)[0]
 +
 +SELECT t1.ps_partkey AS partkey,
 +       t1.part_value AS part_value
 +FROM  (
 +        SELECT ps_partkey AS ps_partkey,
 +               tpch.sum(ps.ps_supplycost * ps.ps_availqty) AS part_value
 +        FROM  Partsupp ps,
 +          (
 +        SELECT s.s_suppkey s_suppkey
 +        FROM  Supplier AS s,
 +              Nation as n
 +        WHERE s.s_nationkey = n.n_nationkey
 +    ) sn
 +    WHERE ps.ps_suppkey = sn.s_suppkey
 +    GROUP BY ps.ps_partkey AS ps_partkey
 +) t1
 +WHERE t1.part_value > sum * 0.00001
 +ORDER BY t1.part_value DESC
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
index 24794de,0000000..e069759
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE tpch;
 +
 +SELECT c_count AS c_count, count(gco) AS custdist
 +FROM  (
 +        SELECT c_custkey AS c_custkey, sum(co.o_orderkey_count) AS c_count
 +        FROM  (
 +                SELECT c.c_custkey AS c_custkey,
-                        count(
++                       coll_count(
 +                           (
 +                            select element o.o_orderkey
 +                            from  Orders as o
 +                            where c.c_custkey = o.o_custkey and not(like(o.o_comment,'%special%requests%'))
 +                           )
 +                         ) AS o_orderkey_count
 +                from  Customer c
 +        ) co
 +        GROUP BY co.c_custkey c_custkey
 +) gco
 +GROUP BY gco.c_count as c_count
 +ORDER BY custdist desc,c_count DESC
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q15_top_supplier/q15_top_supplier.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q15_top_supplier/q15_top_supplier.3.query.sqlpp
index 1a036cc,0000000..d75a39b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q15_top_supplier/q15_top_supplier.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q15_top_supplier/q15_top_supplier.3.query.sqlpp
@@@ -1,46 -1,0 +1,47 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE tpch;
 +
 +
 +declare function revenue() {
 +(
 +    SELECT l_suppkey AS supplier_no,
 +           sum(l.l_extendedprice * (1 - l.l_discount)) AS total_revenue
 +    FROM  LineItem l
 +    WHERE l.l_shipdate >= '1996-01-01' and l.l_shipdate < '1996-04-01'
 +    GROUP BY l.l_suppkey l_suppkey
 +)
 +};
 +
- WITH  m AS max((
-       SELECT ELEMENT r2.total_revenue
++WITH  m AS (
++      SELECT ELEMENT max(r2.total_revenue)
 +      FROM revenue() r2
- ))
++)[0]
++
 +SELECT s.s_suppkey s_suppkey,
 +       s.s_name s_name,
 +       s.s_address s_address,
 +       s.s_phone s_phone,
 +       r.total_revenue total_revenue
 +FROM  Supplier s,
 +      revenue() r
 +WHERE s.s_suppkey = r.supplier_no AND r.total_revenue < m + 0.000000001
 +      AND r.total_revenue > m - 0.000000001
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
index 4519fa6,0000000..63a33ce
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
@@@ -1,37 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE tpch;
 +
 +
 +WITH tmp AS
 +(
 +    SELECT l_partkey t_partkey, 0.2 * avg(l.l_quantity) t_avg_quantity
 +    FROM LineItem AS l
 +    GROUP BY l.l_partkey AS l_partkey
 +)
 +
- SELECT ELEMENT sum((
-       SELECT ELEMENT l.l_extendedprice
-       FROM  tmp t,
++SELECT ELEMENT SUM(l.l_extendedprice) / 7.0
++FROM  tmp t,
 +            LineItem l,
 +            Part p
-       WHERE p.p_partkey = l.l_partkey AND p.p_container = 'MED BOX'
++WHERE p.p_partkey = l.l_partkey AND p.p_container = 'MED BOX'
 +            AND l.l_partkey = t.t_partkey AND l.l_quantity < t.t_avg_quantity
-   )) / 7.0;
++;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
index 558786c,0000000..1003b48
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
@@@ -1,58 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +USE tpch;
 +
 +set "import-private-functions" "true";
 +
- sum(
- (
-   SELECT ELEMENT l.l_extendedprice * (1 - l.l_discount)
-   FROM LineItem l
-   JOIN Part p
-   ON p.p_partkey = l.l_partkey
++SELECT ELEMENT SUM(l.l_extendedprice * (1 - l.l_discount))
++FROM LineItem l
++JOIN Part p
++ON p.p_partkey = l.l_partkey
 +  WHERE
 +  (
 +    p.p_brand = 'Brand#12'
 +    AND "reg-exp"(p.p_container, 'SM CASE||SM BOX||SM PACK||SM PKG')
 +    AND l.l_quantity >= 1 and l.l_quantity <= 11
 +    AND p.p_size >= 1 and p.p_size <= 5
 +    AND "reg-exp"(l.l_shipmode, 'AIR||AIR REG')
 +    AND l.l_shipinstruct = 'DELIVER IN PERSON'
 +  )
 +  OR
 +  (
 +    p.p_brand = 'Brand#23'
 +    AND "reg-exp"(p.p_container, 'MED BAG||MED BOX||MED PKG||MED PACK')
 +    AND l.l_quantity >= 10 and l.l_quantity <= 20
 +    AND p.p_size >= 1 and p.p_size <= 10
 +    AND "reg-exp"(l.l_shipmode, 'AIR||AIR REG')
 +    AND l.l_shipinstruct = 'DELIVER IN PERSON'
 +  )
 +  OR
 +  (
 +    p.p_brand = 'Brand#34'
 +    AND "reg-exp"(p.p_container, 'LG CASE||LG BOX||LG PACK||LG PKG')
 +    AND l.l_quantity >= 20 and l.l_quantity <= 30
 +    AND p.p_size >= 1 and p.p_size <= 15
 +    AND "reg-exp"(l.l_shipmode, 'AIR||AIR REG')
 +    AND l.l_shipinstruct = 'DELIVER IN PERSON'
 +  )
-  )
- );
++;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
index 3630541,0000000..109e5a9
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function q22_customer_tmp() {
 +(
 +    SELECT c.c_acctbal AS c_acctbal, c.c_custkey AS c_custkey, substring(c.c_phone,1,2) AS cntrycode
 +    FROM  Customer AS c
 +)
 +};
 +
- WITH  avg AS avg((
-       SELECT ELEMENT c.c_acctbal
++WITH  avg AS (
++      SELECT ELEMENT AVG(c.c_acctbal)
 +      FROM  Customer AS c
 +      WHERE c.c_acctbal > 0.0
-   ))
++  )[0]
 +SELECT  cntrycode AS cntrycode, count(ct) AS numcust, tpch.sum(ct.c_acctbal) AS totacctbal
 +FROM  q22_customer_tmp() AS ct
 +WHERE ct.c_acctbal > avg
 +GROUP BY ct.cntrycode AS cntrycode
 +ORDER BY cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue562/query-issue562.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue562/query-issue562.3.query.sqlpp
index 753b5d3,0000000..3c99324
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue562/query-issue562.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue562/query-issue562.3.query.sqlpp
@@@ -1,48 -1,0 +1,48 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue562
 + * https://code.google.com/p/asterixdb/issues/detail?id=562
 + * Expected Res : SUCCESS
 + * Date         : 15th Jan. 2015
 + */
 +
 +USE tpch;
 +
 +
 +declare function q22_customer_tmp() {
 +(
 +    SELECT c.c_acctbal AS c_acctbal, c.c_custkey AS c_custkey, phone_substr AS cntrycode
 +    FROM  Customer AS c
 +    WITH  phone_substr AS substring(c.c_phone,1,2)
 +    WHERE phone_substr = '13' OR phone_substr = '31' OR phone_substr = '23' OR phone_substr = '29'
 +          OR phone_substr = '30' OR phone_substr = '18' OR phone_substr = '17'
 +)
 +};
 +
 +SELECT cntrycode AS cntrycode, count(ct) AS numcust, sum(ct.c_acctbal) AS totacctbal
 +FROM  q22_customer_tmp() as ct
- WHERE count((
++WHERE coll_count((
 +                SELECT ELEMENT o
 +                FROM  Orders AS o
 +                WHERE ct.c_custkey = o.o_custkey
 +            )) = 0
 +GROUP BY ct.cntrycode AS cntrycode
 +ORDER BY cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-2/query-issue810-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-2/query-issue810-2.3.query.sqlpp
index 5fe9965,0000000..af3421f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-2/query-issue810-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-2/query-issue810-2.3.query.sqlpp
@@@ -1,44 -1,0 +1,44 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
 +SELECT l_returnflag AS l_returnflag,
 +       l_linestatus AS l_linestatus,
-        count(cheaps) AS count_cheaps,
++       coll_count(cheaps) AS count_cheaps,
 +       total_charges AS total_charges
 +FROM  LineItem as l
 +WHERE l.l_shipdate <= '1998-09-02'
 +/* +hash */
 +GROUP BY l.l_returnflag AS l_returnflag,l.l_linestatus AS l_linestatus
 +WITH  cheaps AS (
 +      SELECT ELEMENT m
 +      FROM  l AS m
 +      WHERE m.l_discount > 0.05
 +  ),
 +total_charges AS sum(l.l_extendedprice * (1 - l.l_discount) * (1 + l.l_tax))
 +ORDER BY l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-3/query-issue810-3.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-3/query-issue810-3.3.query.sqlpp
index db4a46c,0000000..dbb9b88
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-3/query-issue810-3.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810-3/query-issue810-3.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +USE tpch;
 +
 +
 +SELECT  l_returnflag AS l_returnflag,
 +        l_linestatus AS l_linestatus,
-         count(cheaps) AS count_cheaps,
-         avg(expensives) AS avg_expensive_discounts,
++        coll_count(cheaps) AS count_cheaps,
++        coll_avg(expensives) AS avg_expensive_discounts,
 +        sum_disc_prices AS sum_disc_prices,
 +        total_charges AS total_charges
 +FROM  LineItem AS l
 +WHERE l.l_shipdate <= '1998-09-02'
 +/* +hash */
 +GROUP BY l.l_returnflag AS l_returnflag,l.l_linestatus AS l_linestatus
 +WITH  expensives AS (
 +      SELECT ELEMENT i.l_discount
 +      FROM  l AS i
 +      WHERE i.l_discount <= 0.05
 +      ),
 +cheaps as (
 +      SELECT ELEMENT i
 +      FROM  l AS i
 +      WHERE i.l_discount > 0.05
 +  ),
 +sum_disc_prices AS sum(l.l_extendedprice * (1 - l.l_discount)),
 +total_charges AS sum(l.l_extendedprice * (1 - l.l_discount) * (1 + l.l_tax))
 +ORDER BY l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810/query-issue810.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810/query-issue810.3.query.sqlpp
index 7e5cb54,0000000..fc80184
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810/query-issue810.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-sql/query-issue810/query-issue810.3.query.sqlpp
@@@ -1,48 -1,0 +1,48 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +USE tpch;
 +
 +
 +SELECT l_returnflag AS l_returnflag,
 +       l_linestatus AS l_linestatus,
-        count(cheap) AS count_cheaps,
-        count(expensive) AS count_expensives
++       coll_count(cheap) AS count_cheaps,
++       coll_count(expensive) AS count_expensives
 +FROM LineItem AS l
 +WHERE l.l_shipdate <= '1998-09-02'
 +/* +hash */
 +GROUP BY l.l_returnflag AS l_returnflag,l.l_linestatus AS l_linestatus
 +with  cheap as (
 +      SELECT ELEMENT m
 +      FROM l AS m
 +      WHERE m.l_discount > 0.05
 +),
 +expensive AS (
 +      SELECT ELEMENT a
 +      FROM l AS a
 +      WHERE a.l_discount <= 0.05
 +)
 +ORDER BY l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.3.query.sqlpp
index 3a015ae,0000000..8efcb7f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate/nest_aggregate.3.query.sqlpp
@@@ -1,46 -1,0 +1,46 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue782
 + * https://code.google.com/p/asterixdb/issues/detail?id=782
 + * Expected Res : SUCCESS
 + * Date         : 2nd Jun 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element {'order_date':orderdate,'sum_price':sum}
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey /*+ indexnl */ = sn.n_nationkey)
 +order by nation.n_nationkey
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.3.query.sqlpp
index 1aa687c,0000000..1b70f0e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/nest_aggregate2/nest_aggregate2.3.query.sqlpp
@@@ -1,46 -1,0 +1,46 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue782
 + * https://code.google.com/p/asterixdb/issues/detail?id=782
 + * Expected Res : SUCCESS
 + * Date         : 2nd Jun 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element orderdate
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey /*+ indexnl */ = sn.n_nationkey)
 +order by nation.n_nationkey
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
index 94b313f,0000000..a971652
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.sum((
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.coll_sum((
 +        select element i.l_quantity
 +        from  l as i
-     )),'sum_base_price':tpch.sum((
++    )),'sum_base_price':tpch.coll_sum((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'sum_disc_price':tpch.sum((
++    )),'sum_disc_price':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount))
 +        from  l as i
-     )),'sum_charge':tpch.sum((
++    )),'sum_charge':tpch.coll_sum((
 +        select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +        from  l as i
-     )),'ave_qty':tpch.avg((
++    )),'ave_qty':tpch.coll_avg((
 +        select element i.l_quantity
 +        from  l as i
-     )),'ave_price':tpch.avg((
++    )),'ave_price':tpch.coll_avg((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'ave_disc':tpch.avg((
++    )),'ave_disc':tpch.coll_avg((
 +        select element i.l_discount
 +        from  l as i
 +    )),'count_order':tpch.count(l)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
index a55c74d,0000000..499899d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q02_minimum_cost_supplier/q02_minimum_cost_supplier.3.query.sqlpp
@@@ -1,80 -1,0 +1,80 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp1() {
 +(
 +    select element {'s_acctbal':pssrn.s_acctbal,'s_name':pssrn.s_name,'n_name':pssrn.n_name,'p_partkey':p.p_partkey,'ps_supplycost':pssrn.ps_supplycost,'p_mfgr':p.p_mfgr,'s_address':pssrn.s_address,'s_phone':pssrn.s_phone,'s_comment':pssrn.s_comment}
 +    from  Part as p,
 +          (
 +        select element {'n_name':srn.n_name,'p_partkey':ps.ps_partkey,'ps_supplycost':ps.ps_supplycost,'s_name':srn.s_name,'s_acctbal':srn.s_acctbal,'s_address':srn.s_address,'s_phone':srn.s_phone,'s_comment':srn.s_comment}
 +        from  Partsupp as ps,
 +              (
 +            select element {'s_suppkey':s.s_suppkey,'n_name':rn.n_name,'s_name':s.s_name,'s_acctbal':s.s_acctbal,'s_address':s.s_address,'s_phone':s.s_phone,'s_comment':s.s_comment}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_nationkey':n.n_nationkey,'n_name':n.n_name}
 +                from  Region as r,
 +                      Nation as n
 +                where ((n.n_regionkey = r.r_regionkey) and (r.r_name = 'EUROPE'))
 +            ) as rn
 +            where (s.s_nationkey = rn.n_nationkey)
 +        ) as srn
 +        where (srn.s_suppkey = ps.ps_suppkey)
 +    ) as pssrn
 +    where ((p.p_partkey = pssrn.p_partkey) and tpch.like(p.p_type,'%BRASS'))
 +)
 +};
 +declare function tmp2() {
 +(
-     select element {'p_partkey':p_partkey,'ps_min_supplycost':tpch.min((
++    select element {'p_partkey':p_partkey,'ps_min_supplycost':tpch.coll_min((
 +            select element i.ps_supplycost
 +            from  pssrn as i
 +        ))}
 +    from  Part as p,
 +          (
 +        select element {'n_name':srn.n_name,'p_partkey':ps.ps_partkey,'ps_supplycost':ps.ps_supplycost,'s_name':srn.s_name,'s_acctbal':srn.s_acctbal,'s_address':srn.s_address,'s_phone':srn.s_phone,'s_comment':srn.s_comment}
 +        from  Partsupp as ps,
 +              (
 +            select element {'s_suppkey':s.s_suppkey,'n_name':rn.n_name,'s_name':s.s_name,'s_acctbal':s.s_acctbal,'s_address':s.s_address,'s_phone':s.s_phone,'s_comment':s.s_comment}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_nationkey':n.n_nationkey,'n_name':n.n_name}
 +                from  Region as r,
 +                      Nation as n
 +                where ((n.n_regionkey = r.r_regionkey) and (r.r_name = 'EUROPE'))
 +            ) as rn
 +            where (s.s_nationkey = rn.n_nationkey)
 +        ) as srn
 +        where (srn.s_suppkey = ps.ps_suppkey)
 +    ) as pssrn
 +    where ((p.p_partkey = pssrn.p_partkey) and tpch.like(p.p_type,'%BRASS'))
 +    /* +hash */
 +    group by pssrn.p_partkey as p_partkey
 +)
 +};
 +select element {'s_acctbal':t1.s_acctbal,'s_name':t1.s_name,'n_name':t1.n_name,'p_partkey':t1.p_partkey,'p_mfgr':t1.p_mfgr,'s_address':t1.s_address,'s_phone':t1.s_phone,'s_comment':t1.s_comment}
 +from  tpch.tmp2() as t2,
 +      tpch.tmp1() as t1
 +where ((t1.p_partkey = t2.p_partkey) and (t1.ps_supplycost = t2.ps_min_supplycost))
 +order by t1.s_acctbal desc,t1.n_name,t1.s_name,t1.p_partkey
 +limit 100
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
index d30c6d3,0000000..bddbb53
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q03_shipping_priority_nt/q03_shipping_priority_nt.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'l_orderkey':l_orderkey,'revenue':revenue,'o_orderdate':o_orderdate,'o_shippriority':o_shippriority}
 +from  Customer as c,
 +      Orders as o,
 +      LineItem as l
 +where (((c.c_mktsegment = 'BUILDING') and (c.c_custkey = o.o_custkey)) and ((l.l_orderkey = o.o_orderkey) and (o.o_orderdate < '1995-03-15') and (l.l_shipdate > '1995-03-15')))
 +/* +hash */
 +group by l.l_orderkey as l_orderkey,o.o_orderdate as o_orderdate,o.o_shippriority as o_shippriority
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  l as i
 +  ))
 +order by revenue desc,o_orderdate
 +limit 10
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
index 150e4b1,0000000..9050001
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q05_local_supplier_volume/q05_local_supplier_volume.3.query.sqlpp
@@@ -1,54 -1,0 +1,54 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'n_name':n_name,'revenue':revenue}
 +from  Customer as c,
 +      (
 +    select element {'n_name':l1.n_name,'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'s_nationkey':l1.s_nationkey,'o_custkey':o.o_custkey}
 +    from  Orders as o,
 +          (
 +        select element {'n_name':s1.n_name,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_orderkey':l.l_orderkey,'s_nationkey':s1.s_nationkey}
 +        from  LineItem as l,
 +              (
 +            select element {'n_name':n1.n_name,'s_suppkey':s.s_suppkey,'s_nationkey':s.s_nationkey}
 +            from  Supplier as s,
 +                  (
 +                select element {'n_name':n.n_name,'n_nationkey':n.n_nationkey}
 +                from  Nation as n,
 +                      Region as r
 +                where (n.n_regionkey = r.r_regionkey)
 +            ) as n1
 +            where (s.s_nationkey = n1.n_nationkey)
 +        ) as s1
 +        where (l.l_suppkey = s1.s_suppkey)
 +    ) as l1
 +    where ((l1.l_orderkey = o.o_orderkey) and (o.o_orderdate >= '1990-01-01') and (o.o_orderdate < '1995-01-01'))
 +) as o1
 +where ((c.c_nationkey = o1.s_nationkey) and (c.c_custkey = o1.o_custkey))
 +/* +hash */
 +group by o1.n_name as n_name
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  o1 as i
 +  ))
 +order by revenue desc
 +;



[25/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
index 9dee61f,0000000..e1012b1
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q06_forecast_revenue_change/q06_forecast_revenue_change.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- {'revenue':tpch.sum((
++{'revenue':tpch.coll_sum((
 +    select element (l.l_extendedprice * l.l_discount)
 +    from  LineItem as l
 +    where ((l.l_shipdate >= '1994-01-01') and (l.l_shipdate < '1995-01-01') and (l.l_discount >= 0.05) and (l.l_discount <= 0.07) and (l.l_quantity < 24))
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
index 74d3b5a,0000000..f2838d7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q07_volume_shipping/q07_volume_shipping.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function q7_volume_shipping_tmp() {
 +(
 +    select element {'supp_nation':n1.n_name,'cust_nation':n2.n_name,'s_nationkey':n1.n_nationkey,'c_nationkey':n2.n_nationkey}
 +    from  Nation as n1,
 +          Nation as n2
 +    where ((n2.n_name = 'GERMANY') or (n1.n_name = 'GERMANY'))
 +)
 +};
 +select element {'supp_nation':supp_nation,'cust_nation':cust_nation,'l_year':l_year,'revenue':revenue}
 +from  (
 +    select element {'l_shipdate':loc.l_shipdate,'l_extendedprice':loc.l_extendedprice,'l_discount':loc.l_discount,'c_nationkey':loc.c_nationkey,'s_nationkey':s.s_nationkey}
 +    from  (
 +        select element {'l_shipdate':lo.l_shipdate,'l_extendedprice':lo.l_extendedprice,'l_discount':lo.l_discount,'l_suppkey':lo.l_suppkey,'c_nationkey':c.c_nationkey}
 +        from  (
 +            select element {'l_shipdate':l.l_shipdate,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_suppkey':l.l_suppkey,'o_custkey':o.o_custkey}
 +            from  LineItem as l,
 +                  Orders as o
 +            where ((o.o_orderkey = l.l_orderkey) and (l.l_shipdate >= '1992-01-01') and (l.l_shipdate <= '1996-12-31'))
 +        ) as lo,
 +              Customer as c
 +        where (c.c_custkey = lo.o_custkey)
 +    ) as loc,
 +          Supplier as s
 +    where (s.s_suppkey = loc.l_suppkey)
 +) as locs,
 +      tpch.q7_volume_shipping_tmp() as t
 +with  l_year0 as tpch."get-year"(locs.l_shipdate)
 +where ((locs.c_nationkey = t.c_nationkey) and (locs.s_nationkey = t.s_nationkey))
 +group by t.supp_nation as supp_nation,t.cust_nation as cust_nation,l_year0 as l_year
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locs as i
 +  ))
 +order by supp_nation,cust_nation,l_year
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.3.query.sqlpp
index 77d3881,0000000..a221b8e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q08_national_market_share/q08_national_market_share.3.query.sqlpp
@@@ -1,69 -1,0 +1,69 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'year':year,'mkt_share':(tpch.sum((
++select element {'year':year,'mkt_share':(tpch.coll_sum((
 +          select element tpch."switch-case"((i.s_name = 'BRAZIL'),true,i.revenue,false,0.0)
 +          from  t as i
-       )) / tpch.sum((
++      )) / tpch.coll_sum((
 +          select element i.revenue
 +          from  t as i
 +      )))}
 +from  (
 +    select element {'year':o_year,'revenue':(slnrcop.l_extendedprice * (1 - slnrcop.l_discount)),'s_name':n2.n_name}
 +    from  (
 +        select element {'o_orderdate':lnrcop.o_orderdate,'l_discount':lnrcop.l_discount,'l_extendedprice':lnrcop.l_extendedprice,'l_suppkey':lnrcop.l_suppkey,'s_nationkey':s.s_nationkey}
 +        from  Supplier as s,
 +              (
 +            select element {'o_orderdate':lnrco.o_orderdate,'l_discount':lnrco.l_discount,'l_extendedprice':lnrco.l_extendedprice,'l_suppkey':lnrco.l_suppkey}
 +            from  (
 +                select element {'o_orderdate':nrco.o_orderdate,'l_partkey':l.l_partkey,'l_discount':l.l_discount,'l_extendedprice':l.l_extendedprice,'l_suppkey':l.l_suppkey}
 +                from  LineItem as l,
 +                      (
 +                    select element {'o_orderdate':o.o_orderdate,'o_orderkey':o.o_orderkey}
 +                    from  Orders as o,
 +                          (
 +                        select element {'c_custkey':c.c_custkey}
 +                        from  Customer as c,
 +                              (
 +                            select element {'n_nationkey':n1.n_nationkey}
 +                            from  Nation as n1,
 +                                  Region as r1
 +                            where ((n1.n_regionkey = r1.r_regionkey) and (r1.r_name = 'AMERICA'))
 +                        ) as nr
 +                        where (c.c_nationkey = nr.n_nationkey)
 +                    ) as nrc
 +                    where (nrc.c_custkey = o.o_custkey)
 +                ) as nrco
 +                where ((l.l_orderkey = nrco.o_orderkey) and (nrco.o_orderdate >= '1995-01-01') and (nrco.o_orderdate < '1996-12-31'))
 +            ) as lnrco,
 +                  Part as p
 +            where ((p.p_partkey = lnrco.l_partkey) and (p.p_type = 'ECONOMY ANODIZED STEEL'))
 +        ) as lnrcop
 +        where (s.s_suppkey = lnrcop.l_suppkey)
 +    ) as slnrcop,
 +          Nation as n2
 +    with  o_year as tpch."get-year"(slnrcop.o_orderdate)
 +    where (slnrcop.s_nationkey = n2.n_nationkey)
 +) as t
 +group by t.year as year
 +order by year
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
index b7e5e4b,0000000..a76e49e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q09_product_type_profit_nt/q09_product_type_profit_nt.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.sum((
++select element {'nation':nation,'o_year':o_year,'sum_profit':tpch.coll_sum((
 +        select element pr.amount
 +        from  profit as pr
 +    ))}
 +from  (
 +    select element {'nation':l3.n_name,'o_year':o_year,'amount':amount}
 +    from  Orders as o,
 +          (
 +        select element {'l_extendedprice':l2.l_extendedprice,'l_discount':l2.l_discount,'l_quantity':l2.l_quantity,'l_orderkey':l2.l_orderkey,'n_name':l2.n_name,'ps_supplycost':l2.ps_supplycost}
 +        from  Part as p join
 +              (
 +            select element {'l_extendedprice':l1.l_extendedprice,'l_discount':l1.l_discount,'l_quantity':l1.l_quantity,'l_partkey':l1.l_partkey,'l_orderkey':l1.l_orderkey,'n_name':l1.n_name,'ps_supplycost':ps.ps_supplycost}
 +            from  Partsupp as ps join
 +                  (
 +                select element {'l_suppkey':l.l_suppkey,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount,'l_quantity':l.l_quantity,'l_partkey':l.l_partkey,'l_orderkey':l.l_orderkey,'n_name':s1.n_name}
 +                from  (
 +                    select element {'s_suppkey':s.s_suppkey,'n_name':n.n_name}
 +                    from  Supplier as s,
 +                          Nation as n
 +                    where (n.n_nationkey = s.s_nationkey)
 +                ) as s1 join
 +                      LineItem as l
 +                on (s1.s_suppkey = l.l_suppkey)
 +            ) as l1
 +            on ((ps.ps_suppkey = l1.l_suppkey) and (ps.ps_partkey = l1.l_partkey))
 +        ) as l2
 +         on (tpch.contains(p.p_name,'green') and (p.p_partkey = l2.l_partkey))
 +    ) as l3
 +    with  amount as ((l3.l_extendedprice * (1 - l3.l_discount)) - (l3.ps_supplycost * l3.l_quantity)),
 +          o_year as tpch."get-year"(o.o_orderdate)
 +    where (o.o_orderkey = l3.l_orderkey)
 +) as profit
 +group by profit.nation as nation,profit.o_year as o_year
 +order by nation,o_year desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.3.query.sqlpp
index 50fb6c3,0000000..7278f81
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item/q10_returned_item.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'c_custkey':c_custkey,'c_name':c_name,'revenue':revenue,'c_acctbal':c_acctbal,'n_name':n_name,'c_address':c_address,'c_phone':c_phone,'c_comment':c_comment}
 +from  (
 +    select element {'c_custkey':ocn.c_custkey,'c_name':ocn.c_name,'c_acctbal':ocn.c_acctbal,'n_name':ocn.n_name,'c_address':ocn.c_address,'c_phone':ocn.c_phone,'c_comment':ocn.c_comment,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount}
 +    from  LineItem as l,
 +          (
 +        select element {'c_custkey':c.c_custkey,'c_name':c.c_name,'c_acctbal':c.c_acctbal,'n_name':n.n_name,'c_address':c.c_address,'c_phone':c.c_phone,'c_comment':c.c_comment,'o_orderkey':o.o_orderkey}
 +        from  Orders as o,
 +              Customer as c,
 +              Nation as n
 +        where (((c.c_custkey = o.o_custkey) and (o.o_orderdate >= '1993-10-01') and (o.o_orderdate < '1994-01-01')) and (c.c_nationkey = n.n_nationkey))
 +    ) as ocn
 +    where ((l.l_orderkey = ocn.o_orderkey) and (l.l_returnflag = 'R'))
 +) as locn
 +group by locn.c_custkey as c_custkey,locn.c_name as c_name,locn.c_acctbal as c_acctbal,locn.c_phone as c_phone,locn.n_name as n_name,locn.c_address as c_address,locn.c_comment as c_comment
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locn as i
 +  ))
 +order by revenue desc
 +limit 20
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
index 50fb6c3,0000000..7278f81
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q10_returned_item_int64/q10_returned_item_int64.3.query.sqlpp
@@@ -1,43 -1,0 +1,43 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'c_custkey':c_custkey,'c_name':c_name,'revenue':revenue,'c_acctbal':c_acctbal,'n_name':n_name,'c_address':c_address,'c_phone':c_phone,'c_comment':c_comment}
 +from  (
 +    select element {'c_custkey':ocn.c_custkey,'c_name':ocn.c_name,'c_acctbal':ocn.c_acctbal,'n_name':ocn.n_name,'c_address':ocn.c_address,'c_phone':ocn.c_phone,'c_comment':ocn.c_comment,'l_extendedprice':l.l_extendedprice,'l_discount':l.l_discount}
 +    from  LineItem as l,
 +          (
 +        select element {'c_custkey':c.c_custkey,'c_name':c.c_name,'c_acctbal':c.c_acctbal,'n_name':n.n_name,'c_address':c.c_address,'c_phone':c.c_phone,'c_comment':c.c_comment,'o_orderkey':o.o_orderkey}
 +        from  Orders as o,
 +              Customer as c,
 +              Nation as n
 +        where (((c.c_custkey = o.o_custkey) and (o.o_orderdate >= '1993-10-01') and (o.o_orderdate < '1994-01-01')) and (c.c_nationkey = n.n_nationkey))
 +    ) as ocn
 +    where ((l.l_orderkey = ocn.o_orderkey) and (l.l_returnflag = 'R'))
 +) as locn
 +group by locn.c_custkey as c_custkey,locn.c_name as c_name,locn.c_acctbal as c_acctbal,locn.c_phone as c_phone,locn.n_name as n_name,locn.c_address as c_address,locn.c_comment as c_comment
- with  revenue as tpch.sum((
++with  revenue as tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  locn as i
 +  ))
 +order by revenue desc
 +limit 20
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.3.query.sqlpp
index ea3bd5a,0000000..073835c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q11_important_stock/q11_important_stock.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- with  sum as tpch.sum((
++with  sum as tpch.coll_sum((
 +      select element (ps.ps_supplycost * ps.ps_availqty)
 +      from  Partsupp as ps,
 +            (
 +          select element {'s_suppkey':s.s_suppkey}
 +          from  Supplier as s,
 +                Nation as n
 +          where (s.s_nationkey = n.n_nationkey)
 +      ) as sn
 +      where (ps.ps_suppkey = sn.s_suppkey)
 +  ))
 +select element {'partkey':t1.ps_partkey,'part_value':t1.part_value}
 +from  (
-     select element {'ps_partkey':ps_partkey,'part_value':tpch.sum((
++    select element {'ps_partkey':ps_partkey,'part_value':tpch.coll_sum((
 +            select element (i.ps_supplycost * i.ps_availqty)
 +            from  ps as i
 +        ))}
 +    from  Partsupp as ps,
 +          (
 +        select element {'s_suppkey':s.s_suppkey}
 +        from  Supplier as s,
 +              Nation as n
 +        where (s.s_nationkey = n.n_nationkey)
 +    ) as sn
 +    where (ps.ps_suppkey = sn.s_suppkey)
 +    group by ps.ps_partkey as ps_partkey
 +) as t1
 +where (t1.part_value > (sum * 0.00001))
 +order by t1.part_value desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.3.query.sqlpp
index 61b685e,0000000..2bf33fc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q12_shipping/q12_shipping.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'l_shipmode':l_shipmode,'high_line_count':tpch.sum((
++select element {'l_shipmode':l_shipmode,'high_line_count':tpch.coll_sum((
 +        select element tpch."switch-case"(((i.o_orderpriority = '1-URGENT') or (i.o_orderpriority = '2-HIGH')),true,1,false,0)
 +        from  o as i
-     )),'low_line_count':tpch.sum((
++    )),'low_line_count':tpch.coll_sum((
 +        select element tpch."switch-case"(((i.o_orderpriority = '1-URGENT') or (i.o_orderpriority = '2-HIGH')),true,0,false,1)
 +        from  o as i
 +    ))}
 +from  LineItem as l,
 +      Orders as o
 +where ((o.o_orderkey = l.l_orderkey) and (l.l_commitdate < l.l_receiptdate) and (l.l_shipdate < l.l_commitdate) and (l.l_receiptdate >= '1994-01-01') and (l.l_receiptdate < '1995-01-01') and ((l.l_shipmode = 'MAIL') or (l.l_shipmode = 'SHIP')))
 +group by l.l_shipmode as l_shipmode
 +order by l_shipmode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
index afa7c9b,0000000..d21e1f5
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q13_customer_distribution/q13_customer_distribution.3.query.sqlpp
@@@ -1,44 -1,0 +1,44 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
 +select element {'c_count':c_count,'custdist':custdist}
 +from  (
-     select element {'c_custkey':c_custkey,'c_count':tpch.sum((
++    select element {'c_custkey':c_custkey,'c_count':tpch.coll_sum((
 +            select element i.o_orderkey_count
 +            from  co as i
 +        ))}
 +    from  (
-         select element {'c_custkey':c.c_custkey,'o_orderkey_count':tpch.count((
++        select element {'c_custkey':c.c_custkey,'o_orderkey_count':coll_count((
 +                select element o.o_orderkey
 +                from  Orders as o
 +                where ((c.c_custkey = o.o_custkey) and tpch.not(tpch.like(o.o_comment,'%special%requests%')))
 +            ))}
 +        from  Customer as c
 +    ) as co
 +    group by co.c_custkey as c_custkey
 +) as gco
 +group by gco.c_count as c_count
 +with  custdist as tpch.count(gco)
 +order by custdist desc,c_count desc
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
index a8f29d3,0000000..a376e45
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q14_promotion_effect/q14_promotion_effect.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element (100.0 * tpch.sum((
++select element (100.0 * tpch.coll_sum((
 +      select element tpch."switch-case"(tpch.like(i.p_type,'PROMO%'),true,(i.l_extendedprice * (1 - i.l_discount)),false,0.0)
 +      from  lp as i
-   )) / tpch.sum((
++  )) / tpch.coll_sum((
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  lp as i
 +  )))
 +from  LineItem as l,
 +      Part as p
 +let lp = {'p_type': p.p_type, 'l_extendedprice': l.l_extendedprice, 'l_discount': l.l_discount}
 +where ((l.l_partkey = p.p_partkey) and (l.l_shipdate >= '1995-09-01') and (l.l_shipdate < '1995-10-01'))
 +group by 1 as t
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.3.query.sqlpp
index e21104b,0000000..fdc4fcc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q15_top_supplier/q15_top_supplier.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function revenue() {
 +(
-     select element {'supplier_no':l_suppkey,'total_revenue':tpch.sum((
++    select element {'supplier_no':l_suppkey,'total_revenue':tpch.coll_sum((
 +            select element (i.l_extendedprice * (1 - i.l_discount))
 +            from  l as i
 +        ))}
 +    from  LineItem as l
 +    where ((l.l_shipdate >= '1996-01-01') and (l.l_shipdate < '1996-04-01'))
 +    group by l.l_suppkey as l_suppkey
 +)
 +};
- with  m as tpch.max((
++with  m as tpch.coll_max((
 +      select element r2.total_revenue
 +      from  tpch.revenue() as r2
 +  ))
 +select element {'s_suppkey':s.s_suppkey,'s_name':s.s_name,'s_address':s.s_address,'s_phone':s.s_phone,'total_revenue':r.total_revenue}
 +from  Supplier as s,
 +      tpch.revenue() as r
 +where ((s.s_suppkey = r.supplier_no) and (r.total_revenue < (m + 0.000000001)) and (r.total_revenue > (m - 0.000000001)))
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
index cc4d01a,0000000..acf3281
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q16_parts_supplier_relationship/q16_parts_supplier_relationship.3.query.sqlpp
@@@ -1,49 -1,0 +1,49 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp() {
 +(
 +    select element {'p_brand':psp.p_brand,'p_type':psp.p_type,'p_size':psp.p_size,'ps_suppkey':psp.ps_suppkey}
 +    from  (
 +        select element {'p_brand':p.p_brand,'p_type':p.p_type,'p_size':p.p_size,'ps_suppkey':ps.ps_suppkey}
 +        from  Partsupp as ps,
 +              Part as p
 +        where ((p.p_partkey = ps.ps_partkey) and (p.p_brand != 'Brand#45') and tpch.not(tpch.like(p.p_type,'MEDIUM POLISHED%')))
 +    ) as psp,
 +          Supplier as s
 +    where ((psp.ps_suppkey = s.s_suppkey) and tpch.not(tpch.like(s.s_comment,'%Customer%Complaints%')))
 +)
 +};
 +select element {'p_brand':p_brand,'p_type':p_type,'p_size':p_size,'supplier_cnt':supplier_cnt}
 +from  (
 +    select element {'p_brand':p_brand1,'p_type':p_type1,'p_size':p_size1,'ps_suppkey':ps_suppkey1}
 +    from  tpch.tmp() as t
 +    where ((t.p_size = 49) or (t.p_size = 14) or (t.p_size = 23) or (t.p_size = 45) or (t.p_size = 19) or (t.p_size = 3) or (t.p_size = 36) or (t.p_size = 9))
 +    group by t.p_brand as p_brand1,t.p_type as p_type1,t.p_size as p_size1,t.ps_suppkey as ps_suppkey1
 +) as t2
 +group by t2.p_brand as p_brand,t2.p_type as p_type,t2.p_size as p_size
- with  supplier_cnt as tpch.count((
++with  supplier_cnt as coll_count((
 +      select element i.ps_suppkey
 +      from  t2 as i
 +  ))
 +order by supplier_cnt desc,p_brand,p_type,p_size
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
index ada4f75,0000000..2e057d7
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_large_gby_variant/q17_large_gby_variant.3.query.sqlpp
@@@ -1,57 -1,0 +1,57 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'t_partkey':l_partkey,'t_count':tpch.count(l),'t_avg_quantity':(0.2 * tpch.avg((
++select element {'t_partkey':l_partkey,'t_count':tpch.count(l),'t_avg_quantity':(0.2 * tpch.coll_avg((
 +          select element i.l_quantity
 +          from  l as i
-       ))),'t_max_suppkey':tpch.max((
++      ))),'t_max_suppkey':tpch.coll_max((
 +        select element i.l_suppkey
 +        from  l as i
-     )),'t_max_linenumber':tpch.max((
++    )),'t_max_linenumber':tpch.coll_max((
 +        select element i.l_linenumber
 +        from  l as i
-     )),'t_avg_extendedprice':tpch.avg((
++    )),'t_avg_extendedprice':tpch.coll_avg((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'t_avg_discount':tpch.avg((
++    )),'t_avg_discount':tpch.coll_avg((
 +        select element i.l_discount
 +        from  l as i
-     )),'t_avg_tax':tpch.avg((
++    )),'t_avg_tax':tpch.coll_avg((
 +        select element i.l_tax
 +        from  l as i
-     )),'t_max_shipdate':tpch.max((
++    )),'t_max_shipdate':tpch.coll_max((
 +        select element i.l_shipdate
 +        from  l as i
-     )),'t_min_commitdate':tpch.min((
++    )),'t_min_commitdate':tpch.coll_min((
 +        select element i.l_commitdate
 +        from  l as i
-     )),'t_min_receiptdate':tpch.min((
++    )),'t_min_receiptdate':tpch.coll_min((
 +        select element i.l_receiptdate
 +        from  l as i
-     )),'t_max_comment':tpch.max((
++    )),'t_max_comment':tpch.coll_max((
 +        select element i.l_comment
 +        from  l as i
 +    ))}
 +from  LineItem as l
 +group by l.l_partkey as l_partkey
 +order by l_partkey
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
index 79c397c,0000000..ddbbecc
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q17_small_quantity_order_revenue/q17_small_quantity_order_revenue.3.query.sqlpp
@@@ -1,40 -1,0 +1,40 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp() {
 +(
-     select element {'t_partkey':l_partkey,'t_avg_quantity':(0.2 * tpch.avg((
++    select element {'t_partkey':l_partkey,'t_avg_quantity':(0.2 * tpch.coll_avg((
 +              select element i.l_quantity
 +              from  l as i
 +          )))}
 +    from  LineItem as l
 +    group by l.l_partkey as l_partkey
 +)
 +};
 +
- select element (tpch.sum((
++select element (tpch.coll_sum((
 +      select element l.l_extendedprice
 +      from  tpch.tmp() as t,
 +            LineItem as l,
 +            Part as p
 +      where p.p_partkey = l.l_partkey and p.p_container = 'MED BOX' and l.l_partkey = t.t_partkey and l.l_quantity < t.t_avg_quantity
 +  )) / 7.0);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
index 89ff8f8,0000000..e0976a0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q18_large_volume_customer/q18_large_volume_customer.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
- select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':tpch.sum((
++select element {'c_name':c_name,'c_custkey':c_custkey,'o_orderkey':o_orderkey,'o_orderdate':o_orderdate,'o_totalprice':o_totalprice,'sum_quantity':tpch.coll_sum((
 +        select element j.l_quantity
 +        from  l as j
 +    ))}
 +from  Customer as c,
 +      Orders as o,
 +      (
-     select element {'l_orderkey':l_orderkey,'t_sum_quantity':tpch.sum((
++    select element {'l_orderkey':l_orderkey,'t_sum_quantity':tpch.coll_sum((
 +            select element i.l_quantity
 +            from  l as i
 +        ))}
 +    from  LineItem as l
 +    group by l.l_orderkey as l_orderkey
 +) as t,
 +      LineItem as l
 +where ((c.c_custkey = o.o_custkey) and (o.o_orderkey = t.l_orderkey) and (t.t_sum_quantity > 30) and (l.l_orderkey = t.l_orderkey))
 +group by c.c_name as c_name,c.c_custkey as c_custkey,o.o_orderkey as o_orderkey,o.o_orderdate as o_orderdate,o.o_totalprice as o_totalprice
 +order by o_totalprice desc,o_orderdate
 +limit 100
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
index 7065f87,0000000..f245189
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q19_discounted_revenue/q19_discounted_revenue.3.query.sqlpp
@@@ -1,30 -1,0 +1,30 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
- select element tpch.sum((
++select element tpch.coll_sum((
 +    select element (l.l_extendedprice * (1 - l.l_discount))
 +    from  LineItem as l,
 +          Part as p
 +    where ((p.p_partkey = l.l_partkey) and (((p.p_brand = 'Brand#12') and tpch."reg-exp"(p.p_container,'SM CASE||SM BOX||SM PACK||SM PKG') and (l.l_quantity >= 1) and (l.l_quantity <= 11) and (p.p_size >= 1) and (p.p_size <= 5) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON')) or ((p.p_brand = 'Brand#23') and tpch."reg-exp"(p.p_container,'MED BAG||MED BOX||MED PKG||MED PACK') and (l.l_quantity >= 10) and (l.l_quantity <= 20) and (p.p_size >= 1) and (p.p_size <= 10) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON')) or ((p.p_brand = 'Brand#34') and tpch."reg-exp"(p.p_container,'LG CASE||LG BOX||LG PACK||LG PKG') and (l.l_quantity >= 20) and (l.l_quantity <= 30) and (p.p_size >= 1) and (p.p_size <= 15) and tpch."reg-exp"(l.l_shipmode,'AIR||AIR REG') and (l.l_shipinstruct = 'DELIVER IN PERSON'))))
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
index 0657ad3,0000000..82e38bf
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q20_potential_part_promotion/q20_potential_part_promotion.3.query.sqlpp
@@@ -1,53 -1,0 +1,53 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +select element {'s_name':t4.s_name,'s_address':t4.s_address}
 +from  (
 +    select distinct element {'ps_suppkey':pst1.ps_suppkey}
 +    from  (
-         select element {'l_partkey':l_partkey,'l_suppkey':l_suppkey,'sum_quantity':(0.5 * tpch.sum((
++        select element {'l_partkey':l_partkey,'l_suppkey':l_suppkey,'sum_quantity':(0.5 * tpch.coll_sum((
 +                  select element i.l_quantity
 +                  from  l as i
 +              )))}
 +        from  LineItem as l
 +        group by l.l_partkey as l_partkey,l.l_suppkey as l_suppkey
 +    ) as t2,
 +          (
 +        select element {'ps_suppkey':ps.ps_suppkey,'ps_partkey':ps.ps_partkey,'ps_availqty':ps.ps_availqty}
 +        from  Partsupp as ps,
 +              (
 +            select distinct element {'p_partkey':p.p_partkey}
 +            from  Part as p
 +        ) as t1
 +        where (ps.ps_partkey = t1.p_partkey)
 +    ) as pst1
 +    where ((pst1.ps_partkey = t2.l_partkey) and (pst1.ps_suppkey = t2.l_suppkey) and (pst1.ps_availqty > t2.sum_quantity))
 +) as t3,
 +      (
 +    select element {'s_name':s.s_name,'s_address':s.s_address,'s_suppkey':s.s_suppkey}
 +    from  Nation as n,
 +          Supplier as s
 +    where (s.s_nationkey = n.n_nationkey)
 +) as t4
 +where (t3.ps_suppkey = t4.s_suppkey)
 +order by t4.s_name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
index f91068d,0000000..3ccb9b8
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q21_suppliers_who_kept_orders_waiting/q21_suppliers_who_kept_orders_waiting.3.query.sqlpp
@@@ -1,80 -1,0 +1,80 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function tmp1() {
 +(
-     select element {'l_orderkey':l_orderkey,'count_suppkey':tpch.count((
++    select element {'l_orderkey':l_orderkey,'count_suppkey':coll_count((
 +            select element i.l_suppkey
 +            from  l2 as i
-         )),'max_suppkey':tpch.max((
++        )),'max_suppkey':tpch.coll_max((
 +            select element i.l_suppkey
 +            from  l2 as i
 +        ))}
 +    from  (
 +        select element {'l_orderkey':l_orderkey1,'l_suppkey':l_suppkey1}
 +        from  LineItem as l
 +        group by l.l_orderkey as l_orderkey1,l.l_suppkey as l_suppkey1
 +    ) as l2
 +    group by l2.l_orderkey as l_orderkey
 +)
 +};
 +declare function tmp2() {
 +(
-     select element {'l_orderkey':l_orderkey,'count_suppkey':tpch.count((
++    select element {'l_orderkey':l_orderkey,'count_suppkey':coll_count((
 +            select element i.l_suppkey
 +            from  l2 as i
-         )),'max_suppkey':tpch.max((
++        )),'max_suppkey':tpch.coll_max((
 +            select element i.l_suppkey
 +            from  l2 as i
 +        ))}
 +    from  (
 +        select element {'l_orderkey':l_orderkey1,'l_suppkey':l_suppkey1}
 +        from  LineItem as l
 +        where (l.l_receiptdate > l.l_commitdate)
 +        group by l.l_orderkey as l_orderkey1,l.l_suppkey as l_suppkey1
 +    ) as l2
 +    group by l2.l_orderkey as l_orderkey
 +)
 +};
 +select element {'s_name':s_name,'numwait':numwait}
 +from  (
 +    select element {'s_name':t3.s_name,'l_suppkey':t3.l_suppkey,'l_orderkey':t2.l_orderkey,'count_suppkey':t2.count_suppkey,'max_suppkey':t2.max_suppkey}
 +    from  (
 +            select element {'s_name':ns.s_name,'l_orderkey':t1.l_orderkey,'l_suppkey':l.l_suppkey}
 +            from  LineItem as l,
 +                  (
 +                        select element {'s_name':s.s_name,'s_suppkey':s.s_suppkey}
 +                        from  Nation as n,
 +                        Supplier as s
 +                        where (s.s_nationkey = n.n_nationkey)
 +                   ) as ns,
 +                   Orders as o,
 +                   tpch.tmp1() as t1
 +            where ns.s_suppkey = l.l_suppkey and l.l_receiptdate > l.l_commitdate and o.o_orderkey = l.l_orderkey and l.l_orderkey = t1.l_orderkey
 +    ) as t3,
 +      tpch.tmp2() as t2
 +    where ((t2.count_suppkey >= 0) and (t3.l_orderkey = t2.l_orderkey))
 +) as t4
 +group by t4.s_name as s_name
 +with  numwait as tpch.count(t4)
 +order by numwait desc,s_name
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
index 6136008,0000000..328c753
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/q22_global_sales_opportunity/q22_global_sales_opportunity.3.query.sqlpp
@@@ -1,42 -1,0 +1,42 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +declare function q22_customer_tmp() {
 +(
 +    select element {'c_acctbal':c.c_acctbal,'c_custkey':c.c_custkey,'cntrycode':tpch.substring(c.c_phone,1,2)}
 +    from  Customer as c
 +)
 +};
- with  avg as tpch.avg((
++with  avg as tpch.coll_avg((
 +      select element c.c_acctbal
 +      from  Customer as c
 +      where (c.c_acctbal > 0.0)
 +  ))
- select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.sum((
++select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.coll_sum((
 +        select element i.c_acctbal
 +        from  ct as i
 +    ))}
 +from  tpch.q22_customer_tmp() as ct
 +where (ct.c_acctbal > avg)
 +group by ct.cntrycode as cntrycode
 +order by cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.3.query.sqlpp
index ac54a30,0000000..eaac9ce
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue562/query-issue562.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue562
 + * https://code.google.com/p/asterixdb/issues/detail?id=562
 + * Expected Res : SUCCESS
 + * Date         : 15th Jan. 2015
 + */
 +
 +use tpch;
 +
 +
 +declare function q22_customer_tmp() {
 +(
 +    select element {'c_acctbal':c.c_acctbal,'c_custkey':c.c_custkey,'cntrycode':phone_substr}
 +    from  Customer as c
 +    with  phone_substr as tpch.substring(c.c_phone,1,2)
 +    where ((phone_substr = '13') or (phone_substr = '31') or (phone_substr = '23') or (phone_substr = '29') or (phone_substr = '30') or (phone_substr = '18') or (phone_substr = '17'))
 +)
 +};
- with  avg as tpch.avg((
++with  avg as tpch.coll_avg((
 +      select element c.c_acctbal
 +      from  Customer as c
 +      with  phone_substr as tpch.substring(c.c_phone,1,2)
 +      where ((c.c_acctbal > 0.0) and ((phone_substr = '13') or (phone_substr = '31') or (phone_substr = '23') or (phone_substr = '29') or (phone_substr = '30') or (phone_substr = '18') or (phone_substr = '17')))
 +  ))
- select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.sum((
++select element {'cntrycode':cntrycode,'numcust':tpch.count(ct),'totacctbal':tpch.coll_sum((
 +        select element i.c_acctbal
 +        from  ct as i
 +    ))}
 +from  tpch.q22_customer_tmp() as ct
- where (tpch.count((
++where (coll_count((
 +    select element o
 +    from  Orders as o
 +    where (ct.c_custkey = o.o_custkey)
 +)) = 0)
 +group by ct.cntrycode as cntrycode
 +order by cntrycode
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.3.query.sqlpp
index 2cad6ea,0000000..de434ef
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785-2/query-issue785-2.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue785
 + * https://code.google.com/p/asterixdb/issues/detail?id=785
 + * Expected Res : SUCCESS
 + * Date         : 2nd Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +with  t as (
 +      select element {'n_nationkey':nation.n_nationkey,'n_name':nation.n_name}
 +      from  Nation as nation,
 +            SelectedNation as sn
 +      where (nation.n_nationkey = sn.n_nationkey)
 +  ),
 +      X as (
 +      select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':sum}
 +      from  t as n,
 +            Customer as customer,
 +            Orders as orders
 +      where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = n.n_nationkey))
 +      group by orders.o_orderdate as orderdate,n.n_nationkey as nation_key
-       with  sum as tpch.sum((
++      with  sum as tpch.coll_sum((
 +            select element o.o_totalprice
 +            from  orders as o
 +        ))
 +  )
 +select element {'nation_key':nation_key,'sum_price':(
 +        select element {'orderdate':y.order_date,'sum_price':y.sum_price}
 +        from  x as y
 +        order by y.sum_price desc
 +        limit 3
 +    )}
 +from  X as x
 +group by x.nation_key as nation_key
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.3.query.sqlpp
index ed649ca,0000000..c7761f3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue785/query-issue785.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue785
 + * https://code.google.com/p/asterixdb/issues/detail?id=785
 + * Expected Res : SUCCESS
 + * Date         : 2nd Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation_key,'sum_price':(
 +        select element {'orderdate':od,'sum_price':sum}
 +        from  x as i
 +        group by i.order_date as od
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element s.sum_price
 +              from  i as s
 +          ))
 +        order by sum desc
 +        limit 3
 +    )}
 +from  (
-     select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':tpch.sum((
++    select element {'nation_key':nation_key,'order_date':orderdate,'sum_price':tpch.coll_sum((
 +            select element o.o_totalprice
 +            from  orders as o
 +        ))}
 +    from  Nation as n,
 +          Customer as customer,
 +          Orders as orders
 +    where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = n.n_nationkey))
 +    group by orders.o_orderdate as orderdate,n.n_nationkey as nation_key
 +) as x
 +group by x.nation_key as nation_key
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.3.query.sqlpp
index dead643,0000000..37e3c92
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue786/query-issue786.3.query.sqlpp
@@@ -1,45 -1,0 +1,45 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue786
 + * https://code.google.com/p/asterixdb/issues/detail?id=786
 + * Expected Res : SUCCESS
 + * Date         : 10th Oct. 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element {'order_date':orderdate,'sum_price':sum}
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum desc
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey = sn.sn_nationkey)
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.3.query.sqlpp
index 16d3fe0,0000000..d75ea7b
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-2/query-issue810-2.3.query.sqlpp
@@@ -1,44 -1,0 +1,44 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheaps),'total_charges':tpch.sum(charges)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':coll_count(cheaps),'total_charges':tpch.coll_sum(charges)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  cheaps as (
 +      select element m
 +      from  l as m
 +      where (m.l_discount > 0.05)
 +  ),
 +      charges as (
 +      select element (a.l_extendedprice * (1 - a.l_discount) * (1 + a.l_tax))
 +      from  l as a
 +  )
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.3.query.sqlpp
index 576192d,0000000..f2656ee
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810-3/query-issue810-3.3.query.sqlpp
@@@ -1,53 -1,0 +1,53 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheaps),'avg_expensive_discounts':tpch.avg(expensives),'sum_disc_prices':tpch.sum(disc_prices),'total_charges':tpch.sum(charges)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':coll_count(cheaps),'avg_expensive_discounts':tpch.coll_avg(expensives),'sum_disc_prices':tpch.coll_sum(disc_prices),'total_charges':tpch.coll_sum(charges)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  expensives as (
 +      select element i.l_discount
 +      from  l as i
 +      where (i.l_discount <= 0.05)
 +  ),
 +      cheaps as (
 +      select element i
 +      from  l as i
 +      where (i.l_discount > 0.05)
 +  ),
 +      charges as (
 +      select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +      from  l as i
 +  ),
 +      disc_prices as (
 +      select element (i.l_extendedprice * (1 - i.l_discount))
 +      from  l as i
 +  )
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.3.query.sqlpp
index 01e1654,0000000..f4638f2
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue810/query-issue810.3.query.sqlpp
@@@ -1,45 -1,0 +1,45 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue810
 + * https://code.google.com/p/asterixdb/issues/detail?id=810
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':tpch.count(cheap),'count_expensives':tpch.count(expensive)}
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'count_cheaps':coll_count(cheap),'count_expensives':coll_count(expensive)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +with  cheap as (
 +      select element m
 +      from  l as m
 +      where (m.l_discount > 0.05)
 +  ),
 +      expensive as (
 +      select element a
 +      from  l as a
 +      where (a.l_discount <= 0.05)
 +  )
 +order by l_returnflag,l_linestatus
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.3.query.sqlpp
index 562f78f,0000000..aefe8a3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827-2/query-issue827-2.3.query.sqlpp
@@@ -1,55 -1,0 +1,55 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue827
 + * https://code.google.com/p/asterixdb/issues/detail?id=827
 + * Expected Res : SUCCESS
 + * Date         : 3rd Dec. 2014
 + */
 +
 +use tpch;
 +
 +
- {'sum_qty_partial':tpch.sum((
++{'sum_qty_partial':tpch.coll_sum((
 +    select element i.l_quantity
 +    from  LineItem as i
 +    where (i.l_shipdate <= '1998-09-02')
- )),'sum_base_price':tpch.sum((
++)),'sum_base_price':tpch.coll_sum((
 +    select element i.l_extendedprice
 +    from  LineItem as i
- )),'sum_disc_price':tpch.sum((
++)),'sum_disc_price':tpch.coll_sum((
 +    select element (i.l_extendedprice * (1 - i.l_discount))
 +    from  LineItem as i
- )),'sum_charge':tpch.sum((
++)),'sum_charge':tpch.coll_sum((
 +    select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +    from  LineItem as i
- )),'ave_qty':tpch.avg((
++)),'ave_qty':tpch.coll_avg((
 +    select element i.l_quantity
 +    from  LineItem as i
 +    where (i.l_shipdate <= '1998-09-02')
- )),'ave_price':tpch.avg((
++)),'ave_price':tpch.coll_avg((
 +    select element i.l_extendedprice
 +    from  LineItem as i
- )),'ave_disc':tpch.avg((
++)),'ave_disc':tpch.coll_avg((
 +    select element i.l_discount
 +    from  LineItem as i
- )),'count_order':tpch.count((
++)),'count_order':coll_count((
 +    select element l
 +    from  LineItem as l
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.3.query.sqlpp
index d056bcb,0000000..e735107
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch-with-index/query-issue827/query-issue827.3.query.sqlpp
@@@ -1,38 -1,0 +1,38 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue827
 + * https://code.google.com/p/asterixdb/issues/detail?id=827
 + * Expected Res : SUCCESS
 + * Date         : 16th Nov. 2014
 + */
 +
 +use tpch;
 +
 +
- {'count_cheaps':tpch.count((
++{'count_cheaps':coll_count((
 +    select element l.l_quantity
 +    from  LineItem as l
- )),'count_expensives':tpch.sum((
++)),'count_expensives':tpch.coll_sum((
 +    select element e
 +    from  (
 +        select element l.l_extendedprice
 +        from  LineItem as l
 +    ) as e
 +))};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate/nest_aggregate.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate/nest_aggregate.3.query.sqlpp
index 3a015ae,0000000..8efcb7f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate/nest_aggregate.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate/nest_aggregate.3.query.sqlpp
@@@ -1,46 -1,0 +1,46 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue782
 + * https://code.google.com/p/asterixdb/issues/detail?id=782
 + * Expected Res : SUCCESS
 + * Date         : 2nd Jun 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element {'order_date':orderdate,'sum_price':sum}
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey /*+ indexnl */ = sn.n_nationkey)
 +order by nation.n_nationkey
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate2/nest_aggregate2.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate2/nest_aggregate2.3.query.sqlpp
index 1aa687c,0000000..1b70f0e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate2/nest_aggregate2.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/nest_aggregate2/nest_aggregate2.3.query.sqlpp
@@@ -1,46 -1,0 +1,46 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
 + * Description  : This test case is to verify the fix for issue782
 + * https://code.google.com/p/asterixdb/issues/detail?id=782
 + * Expected Res : SUCCESS
 + * Date         : 2nd Jun 2014
 + */
 +
 +use tpch;
 +
 +
 +select element {'nation_key':nation.n_nationkey,'name':nation.n_name,'aggregates':(
 +        select element orderdate
 +        from  Orders as orders,
 +              Customer as customer
 +        where ((orders.o_custkey = customer.c_custkey) and (customer.c_nationkey = nation.n_nationkey))
 +        group by orders.o_orderdate as orderdate
-         with  sum as tpch.sum((
++        with  sum as tpch.coll_sum((
 +              select element o.o_totalprice
 +              from  orders as o
 +          ))
 +        order by sum
 +        limit 3
 +    )}
 +from  Nation as nation,
 +      SelectedNation as sn
 +where (nation.n_nationkey /*+ indexnl */ = sn.n_nationkey)
 +order by nation.n_nationkey
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
index 94b313f,0000000..686dd30
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/tpch/q01_pricing_summary_report_nt/q01_pricing_summary_report_nt.3.query.sqlpp
@@@ -1,52 -1,0 +1,52 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +use tpch;
 +
 +
 +set "import-private-functions" "true";
 +
- select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':tpch.sum((
++select element {'l_returnflag':l_returnflag,'l_linestatus':l_linestatus,'sum_qty':COLL_SUM((
 +        select element i.l_quantity
 +        from  l as i
-     )),'sum_base_price':tpch.sum((
++    )),'sum_base_price':COLL_SUM((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'sum_disc_price':tpch.sum((
++    )),'sum_disc_price':COLL_SUM((
 +        select element (i.l_extendedprice * (1 - i.l_discount))
 +        from  l as i
-     )),'sum_charge':tpch.sum((
++    )),'sum_charge':COLL_SUM((
 +        select element (i.l_extendedprice * (1 - i.l_discount) * (1 + i.l_tax))
 +        from  l as i
-     )),'ave_qty':tpch.avg((
++    )),'ave_qty':COLL_AVG((
 +        select element i.l_quantity
 +        from  l as i
-     )),'ave_price':tpch.avg((
++    )),'ave_price':COLL_AVG((
 +        select element i.l_extendedprice
 +        from  l as i
-     )),'ave_disc':tpch.avg((
++    )),'ave_disc':COLL_AVG((
 +        select element i.l_discount
 +        from  l as i
-     )),'count_order':tpch.count(l)}
++    )),'count_order':COLL_COUNT(l)}
 +from  LineItem as l
 +where (l.l_shipdate <= '1998-09-02')
 +/* +hash */
 +group by l.l_returnflag as l_returnflag,l.l_linestatus as l_linestatus
 +order by l_returnflag,l_linestatus
 +;



[31/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.3.query.sqlpp
index f4b95b7,0000000..05d6102
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/min_mixed/min_mixed.3.query.sqlpp
@@@ -1,28 -1,0 +1,28 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
- * Description  : Run sql-min over an ordered list with mixed types
++* Description  : Run coll_sql-min over an ordered list with mixed types
 +* Expected Res : Failure
 +* Date         : Feb 7th 2014
 +*/
 +
- select element "sql-min"((
++select element "coll_sql-min"((
 +    select element x
 +    from  [float('2.0'),'hello world',93847382783847382,date('2013-01-01')] as x
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/query-issue400/query-issue400.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/query-issue400/query-issue400.3.query.sqlpp
index 2df4a86,0000000..08a5659
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/query-issue400/query-issue400.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/query-issue400/query-issue400.3.query.sqlpp
@@@ -1,22 -1,0 +1,22 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
- "sql-count"((select element i
++"coll_sql-count"((select element i
 +from  [[1,2,3,4,5],[6,7,8,9]] as i
 +));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.1.ddl.sqlpp
index e81a84a,0000000..d9e647a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg without nulls.
++ * Description    : Tests the scalar version of coll_sql-avg without nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.2.update.sqlpp
index 5f90a36,0000000..1bc9abf
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg without nulls.
++ * Description    : Tests the scalar version of coll_sql-avg without nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.3.query.sqlpp
index 820e906,0000000..0c854e8
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg/scalar_avg.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg without nulls.
++ * Description    : Tests the scalar version of coll_sql-avg without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-avg"([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test."sql-avg"([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test."sql-avg"([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test."sql-avg"([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test."sql-avg"([test.float('1'),test.float('2'),test.float('3')]),
-       d as test."sql-avg"([test.double('1'),test.double('2'),test.double('3')])
++with  i8 as test."coll_sql-avg"([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test."coll_sql-avg"([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test."coll_sql-avg"([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test."coll_sql-avg"([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test."coll_sql-avg"([test.float('1'),test.float('2'),test.float('3')]),
++      d as test."coll_sql-avg"([test.double('1'),test.double('2'),test.double('3')])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.1.ddl.sqlpp
index 9e6913c,0000000..a1e77be
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg with an empty list.
++ * Description    : Tests the scalar version of coll_sql-avg with an empty list.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.2.update.sqlpp
index 0bcef8b,0000000..920731d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg with an empty list.
++ * Description    : Tests the scalar version of coll_sql-avg with an empty list.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
index 5b372b5,0000000..6cf01bd
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_empty/scalar_avg_empty.3.query.sqlpp
@@@ -1,24 -1,0 +1,24 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg with an empty list.
++ * Description    : Tests the scalar version of coll_sql-avg with an empty list.
 + * Success        : Yes
 + */
 +
- select element "sql-avg"([]);
++select element "coll_sql-avg"([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.1.ddl.sqlpp
index 1e717cf,0000000..0c78dd9
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg with nulls.
++ * Description    : Tests the scalar version of coll_sql-avg with nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.2.update.sqlpp
index 5212ef1,0000000..7d72098
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg with nulls.
++ * Description    : Tests the scalar version of coll_sql-avg with nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.3.query.sqlpp
index 325c738,0000000..49286b5
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_avg_null/scalar_avg_null.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-avg with nulls.
++ * Description    : Tests the scalar version of coll_sql-avg with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-avg"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test."sql-avg"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test."sql-avg"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test."sql-avg"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test."sql-avg"([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test."sql-avg"([test.double('1'),test.double('2'),test.double('3'),null])
++with  i8 as test."coll_sql-avg"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test."coll_sql-avg"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test."coll_sql-avg"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test."coll_sql-avg"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test."coll_sql-avg"([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test."coll_sql-avg"([test.double('1'),test.double('2'),test.double('3'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.1.ddl.sqlpp
index 1e52a55,0000000..40d8ac4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count without nulls.
++ * Description    : Tests the scalar version of coll_sql-count without nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.2.update.sqlpp
index 2268b81,0000000..089102e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count without nulls.
++ * Description    : Tests the scalar version of coll_sql-count without nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.3.query.sqlpp
index b6a24fd,0000000..12a3607
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count/scalar_count.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count without nulls.
++ * Description    : Tests the scalar version of coll_sql-count without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-count"([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test."sql-count"([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test."sql-count"([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test."sql-count"([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test."sql-count"([test.float('1'),test.float('2'),test.float('3')]),
-       d as test."sql-count"([test.double('1'),test.double('2'),test.double('3')]),
-       s as test."sql-count"(['a','b','c'])
++with  i8 as test."coll_sql-count"([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test."coll_sql-count"([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test."coll_sql-count"([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test."coll_sql-count"([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test."coll_sql-count"([test.float('1'),test.float('2'),test.float('3')]),
++      d as test."coll_sql-count"([test.double('1'),test.double('2'),test.double('3')]),
++      s as test."coll_sql-count"(['a','b','c'])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.1.ddl.sqlpp
index cefaca4,0000000..99cd2ae
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count with an empty list.
++ * Description    : Tests the scalar version of coll_sql-count with an empty list.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.2.update.sqlpp
index ba1e7ec,0000000..e297b5f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count with an empty list.
++ * Description    : Tests the scalar version of coll_sql-count with an empty list.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.3.query.sqlpp
index 5d37bc5,0000000..d2a5f86
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_empty/scalar_count_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count with an empty list.
++ * Description    : Tests the scalar version of coll_sql-count with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-count"([]);
++select element test."coll_sql-count"([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.1.ddl.sqlpp
index 2185f6f,0000000..cf4cb87
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count with nulls.
++ * Description    : Tests the scalar version of coll_sql-count with nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.2.update.sqlpp
index d707601,0000000..be7acd0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count with nulls.
++ * Description    : Tests the scalar version of coll_sql-count with nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.3.query.sqlpp
index c0500e4,0000000..abd63d9
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_count_null/scalar_count_null.3.query.sqlpp
@@@ -1,36 -1,0 +1,36 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-count with nulls.
++ * Description    : Tests the scalar version of coll_sql-count with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-count"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test."sql-count"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test."sql-count"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test."sql-count"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test."sql-count"([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test."sql-count"([test.double('1'),test.double('2'),test.double('3'),null]),
-       s as test."sql-count"(['a','b','c',null])
++with  i8 as test."coll_sql-count"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test."coll_sql-count"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test."coll_sql-count"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test."coll_sql-count"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test."coll_sql-count"([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test."coll_sql-count"([test.double('1'),test.double('2'),test.double('3'),null]),
++      s as test."coll_sql-count"(['a','b','c',null])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.1.ddl.sqlpp
index d95f53b,0000000..50cadbb
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max without nulls.
++ * Description    : Tests the scalar version of coll_sql-max without nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.2.update.sqlpp
index 2e50374,0000000..0582e1c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max without nulls.
++ * Description    : Tests the scalar version of coll_sql-max without nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.3.query.sqlpp
index ca96275,0000000..910128e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max/scalar_max.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max without nulls.
++ * Description    : Tests the scalar version of coll_sql-max without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-max"([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test."sql-max"([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test."sql-max"([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test."sql-max"([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test."sql-max"([test.float('1'),test.float('2'),test.float('3')]),
-       d as test."sql-max"([test.double('1'),test.double('2'),test.double('3')]),
-       s as test."sql-max"(['foo','bar','world']),
-       dt as test."sql-max"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
++with  i8 as test."coll_sql-max"([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test."coll_sql-max"([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test."coll_sql-max"([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test."coll_sql-max"([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test."coll_sql-max"([test.float('1'),test.float('2'),test.float('3')]),
++      d as test."coll_sql-max"([test.double('1'),test.double('2'),test.double('3')]),
++      s as test."coll_sql-max"(['foo','bar','world']),
++      dt as test."coll_sql-max"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.1.ddl.sqlpp
index 1c9bfdd,0000000..14a7cce
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max with an empty list.
++ * Description    : Tests the scalar version of coll_sql-max with an empty list.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.2.update.sqlpp
index f4e1750,0000000..575f92d
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max with an empty list.
++ * Description    : Tests the scalar version of coll_sql-max with an empty list.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.3.query.sqlpp
index 3535f66,0000000..8a16ce0
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_empty/scalar_max_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max with an empty list.
++ * Description    : Tests the scalar version of coll_sql-max with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-max"([]);
++select element test."coll_sql-max"([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.1.ddl.sqlpp
index 2673223,0000000..76d9682
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max with nulls.
++ * Description    : Tests the scalar version of coll_sql-max with nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.2.update.sqlpp
index a0de2d4,0000000..61fb2ed
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max with nulls.
++ * Description    : Tests the scalar version of coll_sql-max with nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.3.query.sqlpp
index 813307a,0000000..4f7b412
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_max_null/scalar_max_null.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-max with nulls.
++ * Description    : Tests the scalar version of coll_sql-max with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-max"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test."sql-max"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test."sql-max"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test."sql-max"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test."sql-max"([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test."sql-max"([test.double('1'),test.double('2'),test.double('3'),null]),
-       s as test."sql-max"(['foo','bar','world',null]),
-       dt as test."sql-max"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
++with  i8 as test."coll_sql-max"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test."coll_sql-max"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test."coll_sql-max"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test."coll_sql-max"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test."coll_sql-max"([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test."coll_sql-max"([test.double('1'),test.double('2'),test.double('3'),null]),
++      s as test."coll_sql-max"(['foo','bar','world',null]),
++      dt as test."coll_sql-max"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.1.ddl.sqlpp
index 7fec35a,0000000..dcbb75a
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min without nulls.
++ * Description    : Tests the scalar version of coll_sql-min without nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.2.update.sqlpp
index b7f1259,0000000..0bcbe9c
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min without nulls.
++ * Description    : Tests the scalar version of coll_sql-min without nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.3.query.sqlpp
index eb52dad,0000000..91f2393
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min/scalar_min.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min without nulls.
++ * Description    : Tests the scalar version of coll_sql-min without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-min"([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test."sql-min"([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test."sql-min"([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test."sql-min"([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test."sql-min"([test.float('1'),test.float('2'),test.float('3')]),
-       d as test."sql-min"([test.double('1'),test.double('2'),test.double('3')]),
-       s as test."sql-min"(['foo','bar','world']),
-       dt as test."sql-min"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
++with  i8 as test."coll_sql-min"([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test."coll_sql-min"([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test."coll_sql-min"([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test."coll_sql-min"([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test."coll_sql-min"([test.float('1'),test.float('2'),test.float('3')]),
++      d as test."coll_sql-min"([test.double('1'),test.double('2'),test.double('3')]),
++      s as test."coll_sql-min"(['foo','bar','world']),
++      dt as test."coll_sql-min"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z')])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.1.ddl.sqlpp
index f473284,0000000..10bfbc3
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min with an empty list.
++ * Description    : Tests the scalar version of coll_sql-min with an empty list.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.2.update.sqlpp
index bf0e2ec,0000000..e045685
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min with an empty list.
++ * Description    : Tests the scalar version of coll_sql-min with an empty list.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.3.query.sqlpp
index fb1ca8c,0000000..07c8619
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_empty/scalar_min_empty.3.query.sqlpp
@@@ -1,27 -1,0 +1,27 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min with an empty list.
++ * Description    : Tests the scalar version of coll_sql-min with an empty list.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- select element test."sql-min"([]);
++select element test."coll_sql-min"([]);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.1.ddl.sqlpp
index e2059e6,0000000..5b21e8f
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min with nulls.
++ * Description    : Tests the scalar version of coll_sql-min with nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.2.update.sqlpp
index 4fc7710,0000000..ae2d673
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min with nulls.
++ * Description    : Tests the scalar version of coll_sql-min with nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.3.query.sqlpp
index 290a1c2,0000000..9d30374
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_min_null/scalar_min_null.3.query.sqlpp
@@@ -1,37 -1,0 +1,37 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-min with nulls.
++ * Description    : Tests the scalar version of coll_sql-min with nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-min"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
-       i16 as test."sql-min"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
-       i32 as test."sql-min"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
-       i64 as test."sql-min"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
-       f as test."sql-min"([test.float('1'),test.float('2'),test.float('3'),null]),
-       d as test."sql-min"([test.double('1'),test.double('2'),test.double('3'),null]),
-       s as test."sql-min"(['foo','bar','world',null]),
-       dt as test."sql-min"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
++with  i8 as test."coll_sql-min"([test.int8('1'),test.int8('2'),test.int8('3'),null]),
++      i16 as test."coll_sql-min"([test.int16('1'),test.int16('2'),test.int16('3'),null]),
++      i32 as test."coll_sql-min"([test.int32('1'),test.int32('2'),test.int32('3'),null]),
++      i64 as test."coll_sql-min"([test.int64('1'),test.int64('2'),test.int64('3'),null]),
++      f as test."coll_sql-min"([test.float('1'),test.float('2'),test.float('3'),null]),
++      d as test."coll_sql-min"([test.double('1'),test.double('2'),test.double('3'),null]),
++      s as test."coll_sql-min"(['foo','bar','world',null]),
++      dt as test."coll_sql-min"([test.datetime('2012-03-01T00:00:00Z'),test.datetime('2012-01-01T00:00:00Z'),test.datetime('2012-02-01T00:00:00Z'),null])
 +select element i
 +from  [i8,i16,i32,i64,f,d,s,dt] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.1.ddl.sqlpp
index d6aa2ee,0000000..693c8d8
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum without nulls.
++ * Description    : Tests the scalar version of coll_sql-sum without nulls.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.2.update.sqlpp
index dc9de50,0000000..fe5a187
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum without nulls.
++ * Description    : Tests the scalar version of coll_sql-sum without nulls.
 + * Success        : Yes
 + */
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.3.query.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.3.query.sqlpp
index 9f799ec,0000000..69f68a4
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.3.query.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum/scalar_sum.3.query.sqlpp
@@@ -1,35 -1,0 +1,35 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum without nulls.
++ * Description    : Tests the scalar version of coll_sql-sum without nulls.
 + * Success        : Yes
 + */
 +
 +use test;
 +
 +
- with  i8 as test."sql-sum"([test.int8('1'),test.int8('2'),test.int8('3')]),
-       i16 as test."sql-sum"([test.int16('1'),test.int16('2'),test.int16('3')]),
-       i32 as test."sql-sum"([test.int32('1'),test.int32('2'),test.int32('3')]),
-       i64 as test."sql-sum"([test.int64('1'),test.int64('2'),test.int64('3')]),
-       f as test."sql-sum"([test.float('1'),test.float('2'),test.float('3')]),
-       d as test."sql-sum"([test.double('1'),test.double('2'),test.double('3')])
++with  i8 as test."coll_sql-sum"([test.int8('1'),test.int8('2'),test.int8('3')]),
++      i16 as test."coll_sql-sum"([test.int16('1'),test.int16('2'),test.int16('3')]),
++      i32 as test."coll_sql-sum"([test.int32('1'),test.int32('2'),test.int32('3')]),
++      i64 as test."coll_sql-sum"([test.int64('1'),test.int64('2'),test.int64('3')]),
++      f as test."coll_sql-sum"([test.float('1'),test.float('2'),test.float('3')]),
++      d as test."coll_sql-sum"([test.double('1'),test.double('2'),test.double('3')])
 +select element i
 +from  [i8,i16,i32,i64,f,d] as i
 +;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.1.ddl.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.1.ddl.sqlpp
index 4a58f94,0000000..8ec3c87
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.1.ddl.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.1.ddl.sqlpp
@@@ -1,26 -1,0 +1,26 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum with an empty list.
++ * Description    : Tests the scalar version of coll_sql-sum with an empty list.
 + * Success        : Yes
 + */
 +
 +drop  database test if exists;
 +create  database test;
 +

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.2.update.sqlpp
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.2.update.sqlpp
index cbf8790,0000000..eed803e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.2.update.sqlpp
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/queries_sqlpp/aggregate-sql/scalar_sum_empty/scalar_sum_empty.2.update.sqlpp
@@@ -1,23 -1,0 +1,23 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +/*
-  * Description    : Tests the scalar version of sql-sum with an empty list.
++ * Description    : Tests the scalar version of coll_sql-sum with an empty list.
 + * Success        : Yes
 + */
 +



[15/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java
index 00c1eb7,0000000..3c3b8fb
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFSInputStream.java
@@@ -1,182 -1,0 +1,178 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.stream;
 +
 +import java.io.File;
 +import java.io.FileInputStream;
 +import java.io.IOException;
- import java.nio.file.Path;
- import java.util.Map;
 +
 +import org.apache.asterix.external.api.AsterixInputStream;
 +import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.FeedLogManager;
 +import org.apache.asterix.external.util.FileSystemWatcher;
- import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
- import org.apache.hyracks.dataflow.std.file.FileSplit;
 +import org.apache.log4j.Logger;
 +
 +public class LocalFSInputStream extends AsterixInputStream {
 +
 +    private static final Logger LOGGER = Logger.getLogger(LocalFSInputStream.class.getName());
-     private final Path path;
 +    private final FileSystemWatcher watcher;
 +    private FileInputStream in;
 +    private byte lastByte;
 +    private File currentFile;
 +
-     public LocalFSInputStream(final FileSplit[] fileSplits, final IHyracksTaskContext ctx,
-             final Map<String, String> configuration, final int partition, final String expression, final boolean isFeed)
-             throws IOException {
-         this.path = fileSplits[partition].getLocalFile().getFile().toPath();
-         this.watcher = new FileSystemWatcher(path, expression, isFeed);
-         this.watcher.init();
-     }
- 
-     @Override
-     public void setFeedLogManager(FeedLogManager logManager) {
-         super.setFeedLogManager(logManager);
-         watcher.setFeedLogManager(logManager);
++    public LocalFSInputStream(FileSystemWatcher watcher) {
++        this.watcher = watcher;
 +    }
 +
 +    @Override
 +    public void setController(AbstractFeedDataFlowController controller) {
 +        super.setController(controller);
-         watcher.setController(controller);
 +    }
 +
 +    @Override
++    public void setFeedLogManager(FeedLogManager logManager) throws HyracksDataException {
++        super.setFeedLogManager(logManager);
++        watcher.setFeedLogManager(logManager);
++    };
++
++    @Override
 +    public void close() throws IOException {
 +        IOException ioe = null;
 +        if (in != null) {
 +            try {
 +                closeFile();
 +            } catch (Exception e) {
 +                ioe = new IOException(e);
 +            }
 +        }
 +        try {
 +            watcher.close();
 +        } catch (Exception e) {
 +            if (ioe == null) {
 +                throw e;
 +            }
 +            ioe.addSuppressed(e);
 +            throw ioe;
 +        }
 +    }
 +
 +    private void closeFile() throws IOException {
 +        if (in != null) {
++            if (logManager != null) {
++                logManager.endPartition(currentFile.getAbsolutePath());
++            }
 +            try {
 +                in.close();
 +            } finally {
 +                in = null;
 +                currentFile = null;
 +            }
 +        }
 +    }
 +
 +    /**
 +     * Closes the current input stream and opens the next one, if any.
 +     */
 +    private boolean advance() throws IOException {
 +        closeFile();
-         if (watcher.hasNext()) {
-             currentFile = watcher.next();
++        currentFile = watcher.poll();
++        if (currentFile == null) {
++            if (controller != null) {
++                controller.flush();
++            }
++            currentFile = watcher.take();
++        }
++        if (currentFile != null) {
 +            in = new FileInputStream(currentFile);
++            if (notificationHandler != null) {
++                notificationHandler.notifyNewSource();
++            }
 +            return true;
 +        }
 +        return false;
 +    }
 +
 +    @Override
 +    public int read() throws IOException {
 +        throw new HyracksDataException(
 +                "read() is not supported with this stream. use read(byte[] b, int off, int len)");
 +    }
 +
 +    @Override
 +    public int read(byte[] b, int off, int len) throws IOException {
 +        if (in == null) {
 +            if (!advance()) {
 +                return -1;
 +            }
 +        }
 +        int result = in.read(b, off, len);
 +        while ((result < 0) && advance()) {
 +            // return a new line at the end of every file <--Might create problems for some cases
 +            // depending on the parser implementation-->
 +            if ((lastByte != ExternalDataConstants.BYTE_LF) && (lastByte != ExternalDataConstants.BYTE_LF)) {
 +                lastByte = ExternalDataConstants.BYTE_LF;
 +                b[off] = ExternalDataConstants.BYTE_LF;
 +                return 1;
 +            }
 +            // recursive call
 +            result = in.read(b, off, len);
 +        }
 +        if (result > 0) {
 +            lastByte = b[(off + result) - 1];
 +        }
 +        return result;
 +    }
 +
 +    @Override
 +    public boolean stop() throws Exception {
++        closeFile();
 +        watcher.close();
 +        return true;
 +    }
 +
 +    @Override
 +    public boolean handleException(Throwable th) {
 +        if (in == null) {
 +            return false;
 +        }
 +        if (th instanceof IOException) {
 +            // TODO: Change from string check to exception type
 +            if (th.getCause().getMessage().contains("Malformed input stream")) {
 +                if (currentFile != null) {
 +                    try {
 +                        logManager.logRecord(currentFile.getAbsolutePath(), "Corrupted input file");
 +                    } catch (IOException e) {
 +                        LOGGER.warn("Filed to write to feed log file", e);
 +                    }
 +                    LOGGER.warn("Corrupted input file: " + currentFile.getAbsolutePath());
 +                }
 +                try {
 +                    advance();
 +                    return true;
 +                } catch (Exception e) {
-                     return false;
-                 }
-             } else {
-                 try {
-                     watcher.init();
-                 } catch (IOException e) {
-                     LOGGER.warn("Failed to initialize watcher during failure recovery", e);
-                     return false;
++                    LOGGER.warn("An exception was thrown while trying to skip a file", e);
 +                }
 +            }
-             return true;
 +        }
++        LOGGER.warn("Failed to recover from failure", th);
 +        return false;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
index 85d0e41,0000000..ae012f3
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamFactory.java
@@@ -1,158 -1,0 +1,164 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.input.stream.factory;
 +
 +import java.io.File;
- import java.io.IOException;
++import java.nio.file.Path;
++import java.util.ArrayList;
 +import java.util.Map;
++import java.util.Set;
++import java.util.TreeSet;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.AsterixInputStream;
 +import org.apache.asterix.external.api.IInputStreamFactory;
 +import org.apache.asterix.external.api.INodeResolver;
 +import org.apache.asterix.external.api.INodeResolverFactory;
 +import org.apache.asterix.external.input.stream.LocalFSInputStream;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataUtils;
- import org.apache.asterix.external.util.FeedUtils;
++import org.apache.asterix.external.util.FileSystemWatcher;
 +import org.apache.asterix.external.util.NodeResolverFactory;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.io.FileReference;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +
 +public class LocalFSInputStreamFactory implements IInputStreamFactory {
 +
 +    private static final long serialVersionUID = 1L;
 +
 +    protected static final INodeResolver DEFAULT_NODE_RESOLVER = new NodeResolverFactory().createNodeResolver();
 +    protected static final Logger LOGGER = Logger.getLogger(LocalFSInputStreamFactory.class.getName());
 +    protected static INodeResolver nodeResolver;
 +    protected Map<String, String> configuration;
 +    protected FileSplit[] inputFileSplits;
-     protected FileSplit[] feedLogFileSplits; // paths where instances of this feed can use as log storage
 +    protected boolean isFeed;
 +    protected String expression;
 +    // transient fields (They don't need to be serialized and transferred)
 +    private transient AlgebricksAbsolutePartitionConstraint constraints;
++    private transient FileSystemWatcher watcher;
 +
 +    @Override
-     public AsterixInputStream createInputStream(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
-         try {
-             return new LocalFSInputStream(inputFileSplits, ctx, configuration, partition, expression, isFeed);
-         } catch (IOException e) {
-             throw new HyracksDataException(e);
++    public synchronized AsterixInputStream createInputStream(IHyracksTaskContext ctx, int partition)
++            throws HyracksDataException {
++        if (watcher == null) {
++            String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
++            ArrayList<Path> inputResources = new ArrayList<>();
++            for (int i = 0; i < inputFileSplits.length; i++) {
++                if (inputFileSplits[i].getNodeName().equals(nodeName)) {
++                    inputResources.add(inputFileSplits[i].getLocalFile().getFile().toPath());
++                }
++            }
++            watcher = new FileSystemWatcher(inputResources, expression, isFeed);
 +        }
++        return new LocalFSInputStream(watcher);
 +    }
 +
 +    @Override
 +    public DataSourceType getDataSourceType() {
 +        return DataSourceType.STREAM;
 +    }
 +
 +    @Override
 +    public boolean isIndexible() {
 +        return false;
 +    }
 +
 +    @Override
 +    public void configure(Map<String, String> configuration) throws AsterixException {
 +        this.configuration = configuration;
 +        String[] splits = configuration.get(ExternalDataConstants.KEY_PATH).split(",");
 +        configureFileSplits(splits);
 +        configurePartitionConstraint();
 +        this.isFeed = ExternalDataUtils.isFeed(configuration) && ExternalDataUtils.keepDataSourceOpen(configuration);
-         if (isFeed) {
-             feedLogFileSplits = FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
-                     ExternalDataUtils.getFeedName(configuration), constraints);
-         }
 +        this.expression = configuration.get(ExternalDataConstants.KEY_EXPRESSION);
 +    }
 +
 +    @Override
 +    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() {
 +        return constraints;
 +    }
 +
 +    private void configureFileSplits(String[] splits) throws AsterixException {
++        INodeResolver resolver = getNodeResolver();
 +        if (inputFileSplits == null) {
 +            inputFileSplits = new FileSplit[splits.length];
 +            String nodeName;
 +            String nodeLocalPath;
 +            int count = 0;
 +            String trimmedValue;
 +            for (String splitPath : splits) {
 +                trimmedValue = splitPath.trim();
 +                if (!trimmedValue.contains("://")) {
 +                    throw new AsterixException(
 +                            "Invalid path: " + splitPath + "\nUsage- path=\"Host://Absolute File Path\"");
 +                }
-                 nodeName = trimmedValue.split(":")[0];
++                nodeName = resolver.resolveNode(trimmedValue.split(":")[0]);
 +                nodeLocalPath = trimmedValue.split("://")[1];
 +                FileSplit fileSplit = new FileSplit(nodeName, new FileReference(new File(nodeLocalPath)));
 +                inputFileSplits[count++] = fileSplit;
 +            }
 +        }
 +    }
 +
 +    private void configurePartitionConstraint() throws AsterixException {
-         String[] locs = new String[inputFileSplits.length];
-         String location;
++        Set<String> locs = new TreeSet<>();
 +        for (int i = 0; i < inputFileSplits.length; i++) {
-             location = getNodeResolver().resolveNode(inputFileSplits[i].getNodeName());
-             locs[i] = location;
++            String location = inputFileSplits[i].getNodeName();
++            locs.add(location);
 +        }
-         constraints = new AlgebricksAbsolutePartitionConstraint(locs);
++        constraints = new AlgebricksAbsolutePartitionConstraint(locs.toArray(new String[locs.size()]));
 +    }
 +
 +    protected INodeResolver getNodeResolver() {
 +        if (nodeResolver == null) {
 +            synchronized (DEFAULT_NODE_RESOLVER) {
 +                if (nodeResolver == null) {
 +                    nodeResolver = initializeNodeResolver();
 +                }
 +            }
 +        }
 +        return nodeResolver;
 +    }
 +
 +    private static INodeResolver initializeNodeResolver() {
 +        INodeResolver nodeResolver = null;
 +        String configuredNodeResolverFactory = System.getProperty(ExternalDataConstants.NODE_RESOLVER_FACTORY_PROPERTY);
 +        if (configuredNodeResolverFactory != null) {
 +            try {
 +                nodeResolver = ((INodeResolverFactory) (Class.forName(configuredNodeResolverFactory).newInstance()))
 +                        .createNodeResolver();
 +
 +            } catch (Exception e) {
 +                if (LOGGER.isLoggable(Level.WARNING)) {
 +                    LOGGER.log(Level.WARNING, "Unable to create node resolver from the configured classname "
 +                            + configuredNodeResolverFactory + "\n" + e.getMessage());
 +                }
 +                nodeResolver = DEFAULT_NODE_RESOLVER;
 +            }
 +        } else {
 +            nodeResolver = DEFAULT_NODE_RESOLVER;
 +        }
 +        return nodeResolver;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
index d362201,0000000..6ba27d8
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
@@@ -1,129 -1,0 +1,124 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.provider;
 +
 +import java.io.IOException;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.AsterixInputStream;
 +import org.apache.asterix.external.api.IDataFlowController;
 +import org.apache.asterix.external.api.IDataParserFactory;
 +import org.apache.asterix.external.api.IExternalDataSourceFactory;
 +import org.apache.asterix.external.api.IIndexingDatasource;
 +import org.apache.asterix.external.api.IInputStreamFactory;
 +import org.apache.asterix.external.api.IRecordDataParser;
 +import org.apache.asterix.external.api.IRecordDataParserFactory;
 +import org.apache.asterix.external.api.IRecordReader;
 +import org.apache.asterix.external.api.IRecordReaderFactory;
 +import org.apache.asterix.external.api.IRecordWithPKDataParser;
 +import org.apache.asterix.external.api.IStreamDataParser;
 +import org.apache.asterix.external.api.IStreamDataParserFactory;
 +import org.apache.asterix.external.dataflow.ChangeFeedDataFlowController;
 +import org.apache.asterix.external.dataflow.ChangeFeedWithMetaDataFlowController;
 +import org.apache.asterix.external.dataflow.FeedRecordDataFlowController;
 +import org.apache.asterix.external.dataflow.FeedStreamDataFlowController;
 +import org.apache.asterix.external.dataflow.FeedTupleForwarder;
 +import org.apache.asterix.external.dataflow.FeedWithMetaDataFlowController;
 +import org.apache.asterix.external.dataflow.IndexingDataFlowController;
 +import org.apache.asterix.external.dataflow.RecordDataFlowController;
 +import org.apache.asterix.external.dataflow.StreamDataFlowController;
 +import org.apache.asterix.external.parser.RecordWithMetadataParser;
 +import org.apache.asterix.external.util.DataflowUtils;
 +import org.apache.asterix.external.util.ExternalDataUtils;
 +import org.apache.asterix.external.util.FeedLogManager;
 +import org.apache.asterix.external.util.FeedUtils;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
- import org.apache.hyracks.dataflow.std.file.FileSplit;
 +
 +public class DataflowControllerProvider {
 +
 +    // TODO: Instead, use a factory just like data source and data parser.
 +    @SuppressWarnings({ "rawtypes", "unchecked" })
 +    public static IDataFlowController getDataflowController(ARecordType recordType, IHyracksTaskContext ctx,
 +            int partition, IExternalDataSourceFactory dataSourceFactory, IDataParserFactory dataParserFactory,
-             Map<String, String> configuration, boolean indexingOp, boolean isFeed, FileSplit[] feedLogFileSplits)
-                     throws HyracksDataException {
++            Map<String, String> configuration, boolean indexingOp, boolean isFeed, FeedLogManager feedLogManager)
++            throws HyracksDataException {
 +        try {
-             FeedLogManager feedLogManager = null;
-             if (isFeed) {
-                 feedLogManager = FeedUtils.getFeedLogManager(ctx, partition, feedLogFileSplits);
-             }
 +            switch (dataSourceFactory.getDataSourceType()) {
 +                case RECORDS:
 +                    IRecordReaderFactory<?> recordReaderFactory = (IRecordReaderFactory<?>) dataSourceFactory;
 +                    IRecordReader<?> recordReader = recordReaderFactory.createRecordReader(ctx, partition);
 +                    IRecordDataParserFactory<?> recordParserFactory = (IRecordDataParserFactory<?>) dataParserFactory;
 +                    IRecordDataParser<?> dataParser = recordParserFactory.createRecordParser(ctx);
 +                    if (indexingOp) {
 +                        return new IndexingDataFlowController(ctx,
 +                                DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser,
 +                                recordReader, ((IIndexingDatasource) recordReader).getIndexer());
 +                    } else if (isFeed) {
 +                        FeedTupleForwarder tupleForwarder = (FeedTupleForwarder) DataflowUtils
 +                                .getTupleForwarder(configuration, feedLogManager);
 +                        boolean isChangeFeed = ExternalDataUtils.isChangeFeed(configuration);
 +                        boolean isRecordWithMeta = ExternalDataUtils.isRecordWithMeta(configuration);
 +                        if (isRecordWithMeta) {
 +                            if (isChangeFeed) {
 +                                int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
 +                                return new ChangeFeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager,
 +                                        numOfKeys + 2, (RecordWithMetadataParser) dataParser, recordReader);
 +                            } else {
 +                                return new FeedWithMetaDataFlowController(ctx, tupleForwarder, feedLogManager, 2,
 +                                        (RecordWithMetadataParser) dataParser, recordReader);
 +                            }
 +                        } else if (isChangeFeed) {
 +                            int numOfKeys = ExternalDataUtils.getNumberOfKeys(configuration);
 +                            return new ChangeFeedDataFlowController(ctx, tupleForwarder, feedLogManager, numOfKeys + 1,
 +                                    (IRecordWithPKDataParser) dataParser, recordReader);
 +                        } else {
 +                            return new FeedRecordDataFlowController(ctx, tupleForwarder, feedLogManager, 1, dataParser,
 +                                    recordReader);
 +                        }
 +                    } else {
 +                        return new RecordDataFlowController(ctx,
 +                                DataflowUtils.getTupleForwarder(configuration, feedLogManager), dataParser,
 +                                recordReader, 1);
 +                    }
 +                case STREAM:
 +                    IInputStreamFactory streamFactory = (IInputStreamFactory) dataSourceFactory;
 +                    AsterixInputStream stream = streamFactory.createInputStream(ctx, partition);
 +                    IStreamDataParserFactory streamParserFactory = (IStreamDataParserFactory) dataParserFactory;
 +                    IStreamDataParser streamParser = streamParserFactory.createInputStreamParser(ctx, partition);
 +                    streamParser.setInputStream(stream);
 +                    if (isFeed) {
 +                        return new FeedStreamDataFlowController(ctx,
 +                                (FeedTupleForwarder) DataflowUtils.getTupleForwarder(configuration, feedLogManager),
 +                                feedLogManager, FeedUtils.getNumOfFields(configuration), streamParser, stream);
 +                    } else {
 +                        return new StreamDataFlowController(ctx, DataflowUtils.getTupleForwarder(configuration, null),
 +                                streamParser);
 +                    }
 +                default:
 +                    throw new HyracksDataException(
 +                            "Unknown data source type: " + dataSourceFactory.getDataSourceType());
 +            }
 +        } catch (IOException | AsterixException e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
index f8d64e0,0000000..0f24f91
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
@@@ -1,149 -1,0 +1,115 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.provider;
 +
 +import java.util.Map;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.IExternalDataSourceFactory;
 +import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 +import org.apache.asterix.external.api.IInputStreamFactory;
 +import org.apache.asterix.external.api.IRecordReaderFactory;
 +import org.apache.asterix.external.input.HDFSDataSourceFactory;
 +import org.apache.asterix.external.input.record.reader.RecordWithPKTestReaderFactory;
 +import org.apache.asterix.external.input.record.reader.kv.KVReaderFactory;
 +import org.apache.asterix.external.input.record.reader.kv.KVTestReaderFactory;
- import org.apache.asterix.external.input.record.reader.stream.EmptyLineSeparatedRecordReaderFactory;
- import org.apache.asterix.external.input.record.reader.stream.LineRecordReaderFactory;
- import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReaderFactory;
++import org.apache.asterix.external.input.record.reader.stream.StreamRecordReaderFactory;
 +import org.apache.asterix.external.input.record.reader.twitter.TwitterRecordReaderFactory;
 +import org.apache.asterix.external.input.stream.factory.LocalFSInputStreamFactory;
++import org.apache.asterix.external.input.stream.factory.SocketClientInputStreamFactory;
 +import org.apache.asterix.external.input.stream.factory.SocketServerInputStreamFactory;
 +import org.apache.asterix.external.input.stream.factory.TwitterFirehoseStreamFactory;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataUtils;
 +
 +public class DatasourceFactoryProvider {
 +
 +    public static IExternalDataSourceFactory getExternalDataSourceFactory(Map<String, String> configuration)
 +            throws AsterixException {
 +        if (ExternalDataUtils.getDataSourceType(configuration).equals(DataSourceType.RECORDS)) {
 +            String reader = configuration.get(ExternalDataConstants.KEY_READER);
 +            return DatasourceFactoryProvider.getRecordReaderFactory(reader, configuration);
 +        } else {
 +            // get stream source
 +            String streamSource = configuration.get(ExternalDataConstants.KEY_STREAM_SOURCE);
 +            return DatasourceFactoryProvider.getInputStreamFactory(streamSource, configuration);
 +        }
 +    }
 +
-     public static IInputStreamFactory getInputStreamFactory(String streamSource,
-             Map<String, String> configuration) throws AsterixException {
++    public static IInputStreamFactory getInputStreamFactory(String streamSource, Map<String, String> configuration)
++            throws AsterixException {
 +        IInputStreamFactory streamSourceFactory;
 +        if (ExternalDataUtils.isExternal(streamSource)) {
 +            String dataverse = ExternalDataUtils.getDataverse(configuration);
 +            streamSourceFactory = ExternalDataUtils.createExternalInputStreamFactory(dataverse, streamSource);
 +        } else {
 +            switch (streamSource) {
-                 case ExternalDataConstants.STREAM_HDFS:
-                     streamSourceFactory = new HDFSDataSourceFactory();
-                     break;
 +                case ExternalDataConstants.STREAM_LOCAL_FILESYSTEM:
 +                    streamSourceFactory = new LocalFSInputStreamFactory();
 +                    break;
-                 case ExternalDataConstants.STREAM_SOCKET:
++                case ExternalDataConstants.SOCKET:
 +                case ExternalDataConstants.ALIAS_SOCKET_ADAPTER:
 +                    streamSourceFactory = new SocketServerInputStreamFactory();
 +                    break;
 +                case ExternalDataConstants.STREAM_SOCKET_CLIENT:
 +                    streamSourceFactory = new SocketServerInputStreamFactory();
 +                    break;
 +                case ExternalDataConstants.ALIAS_TWITTER_FIREHOSE_ADAPTER:
 +                    streamSourceFactory = new TwitterFirehoseStreamFactory();
 +                    break;
 +                default:
 +                    throw new AsterixException("unknown input stream factory");
 +            }
 +        }
 +        return streamSourceFactory;
 +    }
 +
 +    public static IRecordReaderFactory<?> getRecordReaderFactory(String reader, Map<String, String> configuration)
 +            throws AsterixException {
 +        if (reader.equals(ExternalDataConstants.EXTERNAL)) {
 +            return ExternalDataUtils.createExternalRecordReaderFactory(configuration);
 +        }
-         String parser = configuration.get(ExternalDataConstants.KEY_PARSER);
-         IInputStreamFactory inputStreamFactory;
-         switch (parser) {
-             case ExternalDataConstants.FORMAT_ADM:
-             case ExternalDataConstants.FORMAT_JSON:
-             case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
-                 inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
-                 return new SemiStructuredRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
-             case ExternalDataConstants.FORMAT_LINE_SEPARATED:
-                 inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
-                 return new EmptyLineSeparatedRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
-             case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
-             case ExternalDataConstants.FORMAT_CSV:
-                 inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
-                 return new LineRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
-             case ExternalDataConstants.FORMAT_RECORD_WITH_METADATA:
-                 switch (reader) {
-                     case ExternalDataConstants.READER_KV:
-                         return new KVReaderFactory();
-                     case ExternalDataConstants.READER_KV_TEST:
-                         return new KVTestReaderFactory();
-                 }
-         }
-         String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
-         if (format != null) {
-             switch (format) {
-                 case ExternalDataConstants.FORMAT_ADM:
-                 case ExternalDataConstants.FORMAT_JSON:
-                 case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
-                     inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
-                     return new SemiStructuredRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
-                 case ExternalDataConstants.FORMAT_LINE_SEPARATED:
-                     inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
-                     return new EmptyLineSeparatedRecordReaderFactory()
-                             .setInputStreamFactoryProvider(inputStreamFactory);
-                 case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
-                 case ExternalDataConstants.FORMAT_CSV:
-                     inputStreamFactory = DatasourceFactoryProvider.getInputStreamFactory(reader, configuration);
-                     return new LineRecordReaderFactory().setInputStreamFactoryProvider(inputStreamFactory);
-             }
-         }
 +        switch (reader) {
++            case ExternalDataConstants.READER_KV:
++                return new KVReaderFactory();
++            case ExternalDataConstants.READER_KV_TEST:
++                return new KVTestReaderFactory();
 +            case ExternalDataConstants.READER_HDFS:
 +                return new HDFSDataSourceFactory();
++            case ExternalDataConstants.ALIAS_LOCALFS_ADAPTER:
++                return new StreamRecordReaderFactory(new LocalFSInputStreamFactory());
 +            case ExternalDataConstants.READER_TWITTER_PULL:
 +            case ExternalDataConstants.READER_TWITTER_PUSH:
++            case ExternalDataConstants.READER_PUSH_TWITTER:
++            case ExternalDataConstants.READER_PULL_TWITTER:
 +                return new TwitterRecordReaderFactory();
-             case ExternalDataConstants.READER_KV:
-                 return new KVReaderFactory();
-             case ExternalDataConstants.READER_KV_TEST:
-                 return new KVTestReaderFactory();
 +            case ExternalDataConstants.TEST_RECORD_WITH_PK:
 +                return new RecordWithPKTestReaderFactory();
++            case ExternalDataConstants.ALIAS_TWITTER_FIREHOSE_ADAPTER:
++                return new StreamRecordReaderFactory(new TwitterFirehoseStreamFactory());
++            case ExternalDataConstants.ALIAS_SOCKET_ADAPTER:
++            case ExternalDataConstants.SOCKET:
++                return new StreamRecordReaderFactory(new SocketServerInputStreamFactory());
++            case ExternalDataConstants.STREAM_SOCKET_CLIENT:
++                return new StreamRecordReaderFactory(new SocketClientInputStreamFactory());
 +            default:
 +                throw new AsterixException("unknown record reader factory: " + reader);
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
index 06928b3,0000000..682fb89
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/ParserFactoryProvider.java
@@@ -1,76 -1,0 +1,76 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.provider;
 +
 +import java.util.Map;
 +
 +import javax.annotation.Nonnull;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.IDataParserFactory;
 +import org.apache.asterix.external.parser.factory.ADMDataParserFactory;
 +import org.apache.asterix.external.parser.factory.DelimitedDataParserFactory;
 +import org.apache.asterix.external.parser.factory.HiveDataParserFactory;
 +import org.apache.asterix.external.parser.factory.RSSParserFactory;
 +import org.apache.asterix.external.parser.factory.RecordWithMetadataParserFactory;
 +import org.apache.asterix.external.parser.factory.TestRecordWithPKParserFactory;
 +import org.apache.asterix.external.parser.factory.TweetParserFactory;
 +import org.apache.asterix.external.util.ExternalDataConstants;
 +import org.apache.asterix.external.util.ExternalDataUtils;
 +
 +public class ParserFactoryProvider {
 +    public static IDataParserFactory getDataParserFactory(Map<String, String> configuration) throws AsterixException {
 +        IDataParserFactory parserFactory = null;
 +        String parserFactoryName = configuration.get(ExternalDataConstants.KEY_DATA_PARSER);
 +        if ((parserFactoryName != null) && ExternalDataUtils.isExternal(parserFactoryName)) {
 +            return ExternalDataUtils.createExternalParserFactory(ExternalDataUtils.getDataverse(configuration),
 +                    parserFactoryName);
 +        } else {
 +            parserFactory = ParserFactoryProvider
 +                    .getDataParserFactory(ExternalDataUtils.getRecordFormat(configuration));
 +        }
 +        return parserFactory;
 +    }
 +
 +    @SuppressWarnings("rawtypes")
 +    public static IDataParserFactory getDataParserFactory(@Nonnull String parser) throws AsterixException {
 +        switch (parser) {
 +            case ExternalDataConstants.FORMAT_ADM:
 +            case ExternalDataConstants.FORMAT_JSON:
 +            case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
 +                return new ADMDataParserFactory();
 +            case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
 +            case ExternalDataConstants.FORMAT_CSV:
 +                return new DelimitedDataParserFactory();
 +            case ExternalDataConstants.FORMAT_HIVE:
 +            case ExternalDataConstants.PARSER_HIVE:
 +                return new HiveDataParserFactory();
 +            case ExternalDataConstants.FORMAT_TWEET:
 +                return new TweetParserFactory();
 +            case ExternalDataConstants.FORMAT_RSS:
 +                return new RSSParserFactory();
 +            case ExternalDataConstants.FORMAT_RECORD_WITH_METADATA:
 +                return new RecordWithMetadataParserFactory();
 +            case ExternalDataConstants.TEST_RECORD_WITH_PK:
 +                return new TestRecordWithPKParserFactory();
 +            default:
-                 throw new AsterixException("Unknown parser " + parser);
++                throw new AsterixException("Unknown format: " + parser);
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index a02152b,0000000..b5ec27a
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@@ -1,232 -1,0 +1,232 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.util;
 +
 +import org.apache.hadoop.hdfs.DistributedFileSystem;
 +import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
 +import org.apache.hadoop.mapred.SequenceFileInputFormat;
 +import org.apache.hadoop.mapred.TextInputFormat;
 +
 +public class ExternalDataConstants {
 +    // TODO: Remove unused variables.
 +    /**
 +     * Keys
 +     */
 +    // used to specify the stream factory for an adapter that has a stream data source
 +    public static final String KEY_STREAM = "stream";
 +    // used to specify the dataverse of the adapter
 +    public static final String KEY_DATAVERSE = "dataverse";
 +    // used to specify the socket addresses when reading data from sockets
 +    public static final String KEY_SOCKETS = "sockets";
 +    // specify whether the socket address points to an NC or an IP
 +    public static final String KEY_MODE = "address-type";
 +    // specify the HDFS name node address when reading HDFS data
 +    public static final String KEY_HDFS_URL = "hdfs";
 +    // specify the path when reading from a file system
 +    public static final String KEY_PATH = "path";
 +    // specify the HDFS input format when reading data from HDFS
 +    public static final String KEY_INPUT_FORMAT = "input-format";
 +    // specifies the filesystem (localfs or HDFS) when using a filesystem data source
 +    public static final String KEY_FILESYSTEM = "fs";
 +    // specifies the address of the HDFS name node
 +    public static final String KEY_HADOOP_FILESYSTEM_URI = "fs.defaultFS";
 +    // specifies the class implementation of the accessed instance of HDFS
 +    public static final String KEY_HADOOP_FILESYSTEM_CLASS = "fs.hdfs.impl";
 +    public static final String KEY_HADOOP_INPUT_DIR = "mapred.input.dir";
 +    public static final String KEY_HADOOP_INPUT_FORMAT = "mapred.input.format.class";
 +    public static final String KEY_HADOOP_SHORT_CIRCUIT = "dfs.client.read.shortcircuit";
 +    public static final String KEY_HADOOP_SOCKET_PATH = "dfs.domain.socket.path";
 +    public static final String KEY_HADOOP_BUFFER_SIZE = "io.file.buffer.size";
 +    public static final String KEY_SOURCE_DATATYPE = "type-name";
 +    public static final String KEY_DELIMITER = "delimiter";
 +    public static final String KEY_PARSER_FACTORY = "tuple-parser";
 +    public static final String KEY_DATA_PARSER = "parser";
 +    public static final String KEY_HEADER = "header";
 +    public static final String KEY_READER = "reader";
 +    public static final String KEY_READER_STREAM = "stream";
 +    public static final String KEY_TYPE_NAME = "type-name";
 +    public static final String KEY_RECORD_START = "record-start";
 +    public static final String KEY_RECORD_END = "record-end";
 +    public static final String KEY_EXPRESSION = "expression";
 +    public static final String KEY_LOCAL_SOCKET_PATH = "local-socket-path";
 +    public static final String KEY_FORMAT = "format";
 +    public static final String KEY_QUOTE = "quote";
 +    public static final String KEY_PARSER = "parser";
 +    public static final String KEY_DATASET_RECORD = "dataset-record";
 +    public static final String KEY_HIVE_SERDE = "hive-serde";
 +    public static final String KEY_RSS_URL = "url";
 +    public static final String KEY_INTERVAL = "interval";
-     public static final String KEY_PULL = "pull";
-     public static final String KEY_PUSH = "push";
 +    public static final String KEY_IS_FEED = "is-feed";
 +    public static final String KEY_WAIT_FOR_DATA = "wait-for-data";
 +    public static final String KEY_FEED_NAME = "feed";
 +    // a string representing external bucket name
 +    public static final String KEY_BUCKET = "bucket";
 +    // a comma delimited list of nodes
 +    public static final String KEY_NODES = "nodes";
 +    // a string representing the password used to authenticate with the external data source
 +    public static final String KEY_PASSWORD = "password";
 +    // an integer representing the number of raw records that can be buffered in the parsing queue
 +    public static final String KEY_QUEUE_SIZE = "queue-size";
 +    // a comma delimited integers representing the indexes of the meta fields in the raw record (i,e: "3,1,0,2" denotes that the first meta field is in index 3 in the actual record)
 +    public static final String KEY_META_INDEXES = "meta-indexes";
 +    // an integer representing the index of the value field in the data type
 +    public static final String KEY_VALUE_INDEX = "value-index";
 +    // a string representing the format of the raw record in the value field in the data type
 +    public static final String KEY_VALUE_FORMAT = "value-format";
 +    // a boolean indicating whether the feed is a change feed
 +    public static final String KEY_IS_CHANGE_FEED = "change-feed";
 +    // an integer representing the number of keys in a change feed
 +    public static final String KEY_KEY_SIZE = "key-size";
 +    // a boolean indicating whether the feed produces records with metadata
 +    public static final String FORMAT_RECORD_WITH_METADATA = "record-with-metadata";
 +    // a string representing the format of the record (for adapters which produces records with additional information like pk or metadata)
 +    public static final String KEY_RECORD_FORMAT = "record-format";
 +    public static final String KEY_META_TYPE_NAME = "meta-type-name";
 +    public static final String READER_STREAM = "stream";
 +    /**
 +     * HDFS class names
 +     */
 +    public static final String CLASS_NAME_TEXT_INPUT_FORMAT = TextInputFormat.class.getName();
 +    public static final String CLASS_NAME_SEQUENCE_INPUT_FORMAT = SequenceFileInputFormat.class.getName();
 +    public static final String CLASS_NAME_RC_INPUT_FORMAT = RCFileInputFormat.class.getName();
 +    public static final String CLASS_NAME_HDFS_FILESYSTEM = DistributedFileSystem.class.getName();
 +    /**
 +     * input formats aliases
 +     */
 +    public static final String INPUT_FORMAT_TEXT = "text-input-format";
 +    public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
 +    public static final String INPUT_FORMAT_RC = "rc-input-format";
 +    /**
 +     * Builtin streams
 +     */
 +
 +    /**
 +     * Builtin record readers
 +     */
 +    public static final String READER_HDFS = "hdfs";
 +    public static final String READER_KV = "key-value";
-     public static final String READER_TWITTER_PUSH = "twitter-push";
-     public static final String READER_TWITTER_PULL = "twitter-pull";
++    public static final String READER_TWITTER_PUSH = "twitter_push";
++    public static final String READER_PUSH_TWITTER = "push_twitter";
++    public static final String READER_TWITTER_PULL = "twitter_pull";
++    public static final String READER_PULL_TWITTER = "pull_twitter";
 +
 +    public static final String CLUSTER_LOCATIONS = "cluster-locations";
 +    public static final String SCHEDULER = "hdfs-scheduler";
 +    public static final String PARSER_HIVE = "hive-parser";
 +    public static final String HAS_HEADER = "has.header";
 +    public static final String TIME_TRACKING = "time.tracking";
 +    public static final String DEFAULT_QUOTE = "\"";
 +    public static final String NODE_RESOLVER_FACTORY_PROPERTY = "node.Resolver";
 +    public static final String DEFAULT_DELIMITER = ",";
 +    public static final String EXTERNAL_LIBRARY_SEPARATOR = "#";
 +    public static final String HDFS_INDEXING_ADAPTER = "hdfs-indexing-adapter";
 +    /**
 +     * supported builtin record formats
 +     */
 +    public static final String FORMAT_HIVE = "hive";
 +    public static final String FORMAT_BINARY = "binary";
 +    public static final String FORMAT_ADM = "adm";
 +    public static final String FORMAT_JSON = "json";
 +    public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
 +    public static final String FORMAT_TWEET = "twitter-status";
 +    public static final String FORMAT_RSS = "rss";
 +    public static final String FORMAT_SEMISTRUCTURED = "semi-structured";
 +    public static final String FORMAT_LINE_SEPARATED = "line-separated";
 +    public static final String FORMAT_HDFS_WRITABLE = "hdfs-writable";
 +    public static final String FORMAT_KV = "kv";
 +
 +    /**
 +     * input streams
 +     */
 +    public static final String STREAM_HDFS = "hdfs";
 +    public static final String STREAM_LOCAL_FILESYSTEM = "localfs";
-     public static final String STREAM_SOCKET = "socket";
++    public static final String SOCKET = "socket";
 +    public static final String STREAM_SOCKET_CLIENT = "socket-client";
 +
 +    /**
 +     * adapter aliases
 +     */
 +    public static final String ALIAS_GENERIC_ADAPTER = "adapter";
 +    public static final String ALIAS_LOCALFS_ADAPTER = "localfs";
 +    public static final String ALIAS_LOCALFS_PUSH_ADAPTER = "push_localfs";
 +    public static final String ALIAS_HDFS_ADAPTER = "hdfs";
 +    public static final String ALIAS_SOCKET_ADAPTER = "socket_adapter";
 +    public static final String ALIAS_TWITTER_FIREHOSE_ADAPTER = "twitter_firehose";
 +    public static final String ALIAS_SOCKET_CLIENT_ADAPTER = "socket_client";
 +    public static final String ALIAS_RSS_ADAPTER = "rss_feed";
 +    public static final String ALIAS_FILE_FEED_ADAPTER = "file_feed";
 +    public static final String ALIAS_TWITTER_PUSH_ADAPTER = "push_twitter";
 +    public static final String ALIAS_TWITTER_PULL_ADAPTER = "pull_twitter";
 +    public static final String ALIAS_CNN_ADAPTER = "cnn_feed";
 +    public static final String ALIAS_FEED_WITH_META_ADAPTER = "feed_with_meta";
 +    public static final String ALIAS_CHANGE_FEED_WITH_META_ADAPTER = "change_feed_with_meta";
 +    // for testing purposes
 +    public static final String ALIAS_TEST_CHANGE_ADAPTER = "test_change_feed";
 +
 +    /**
 +     * Constant String values
 +     */
 +    public static final String TRUE = "true";
 +    public static final String FALSE = "false";
 +
 +    /**
 +     * Constant characters
 +     */
 +    public static final char ESCAPE = '\\';
 +    public static final char QUOTE = '"';
 +    public static final char SPACE = ' ';
 +    public static final char TAB = '\t';
 +    public static final char LF = '\n';
 +    public static final char CR = '\r';
 +    public static final char DEFAULT_RECORD_START = '{';
 +    public static final char DEFAULT_RECORD_END = '}';
 +
 +    /**
 +     * Constant byte characters
 +     */
 +    public static final byte BYTE_LF = '\n';
 +    public static final byte BYTE_CR = '\r';
 +    /**
 +     * Size default values
 +     */
 +    public static final int DEFAULT_BUFFER_SIZE = 4096;
 +    public static final int DEFAULT_BUFFER_INCREMENT = 2048;
 +    public static final int DEFAULT_QUEUE_SIZE = 64;
 +    public static final int MAX_RECORD_SIZE = 32000000;
 +
 +    /**
 +     * Expected parameter values
 +     */
 +    public static final String PARAMETER_OF_SIZE_ONE = "Value of size 1";
 +    public static final String LARGE_RECORD_ERROR_MESSAGE = "Record is too large";
 +    public static final String KEY_RECORD_INDEX = "record-index";
 +    public static final String FORMAT_DCP = "dcp";
 +    public static final String KEY_KEY_INDEXES = "key-indexes";
 +    public static final String KEY_KEY_INDICATORS = "key-indicators";
 +    public static final String KEY_STREAM_SOURCE = "stream-source";
 +    public static final String EXTERNAL = "external";
 +    public static final String KEY_READER_FACTORY = "reader-factory";
 +    public static final String READER_KV_TEST = "kv_test";
 +    public static final String READER_RSS = "rss";
 +    public static final String FORMAT_CSV = "csv";
 +    public static final String TEST_RECORD_WITH_PK = "test-record-with-pk";
 +
 +    public static final String ERROR_LARGE_RECORD = "Record is too large";
 +    public static final String ERROR_PARSE_RECORD = "Parser failed to parse record";
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index 42fe8bf,0000000..76898c2
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@@ -1,342 -1,0 +1,326 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.util;
 +
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.external.api.IDataParserFactory;
 +import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
 +import org.apache.asterix.external.api.IInputStreamFactory;
 +import org.apache.asterix.external.api.IRecordReaderFactory;
 +import org.apache.asterix.external.library.ExternalLibraryManager;
 +import org.apache.asterix.om.types.ARecordType;
 +import org.apache.asterix.om.types.ATypeTag;
 +import org.apache.asterix.om.types.AUnionType;
 +import org.apache.asterix.om.types.IAType;
 +import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
 +import org.apache.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
 +import org.apache.hyracks.dataflow.common.data.parsers.FloatParserFactory;
 +import org.apache.hyracks.dataflow.common.data.parsers.IValueParserFactory;
 +import org.apache.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
 +import org.apache.hyracks.dataflow.common.data.parsers.LongParserFactory;
 +import org.apache.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
 +
 +public class ExternalDataUtils {
 +
 +    // Get a delimiter from the given configuration
 +    public static char getDelimiter(Map<String, String> configuration) throws AsterixException {
 +        String delimiterValue = configuration.get(ExternalDataConstants.KEY_DELIMITER);
 +        if (delimiterValue == null) {
 +            delimiterValue = ExternalDataConstants.DEFAULT_DELIMITER;
 +        } else if (delimiterValue.length() != 1) {
 +            throw new AsterixException(
 +                    "'" + delimiterValue + "' is not a valid delimiter. The length of a delimiter should be 1.");
 +        }
 +        return delimiterValue.charAt(0);
 +    }
 +
 +    // Get a quote from the given configuration when the delimiter is given
 +    // Need to pass delimiter to check whether they share the same character
 +    public static char getQuote(Map<String, String> configuration, char delimiter) throws AsterixException {
 +        String quoteValue = configuration.get(ExternalDataConstants.KEY_QUOTE);
 +        if (quoteValue == null) {
 +            quoteValue = ExternalDataConstants.DEFAULT_QUOTE;
 +        } else if (quoteValue.length() != 1) {
 +            throw new AsterixException("'" + quoteValue + "' is not a valid quote. The length of a quote should be 1.");
 +        }
 +
 +        // Since delimiter (char type value) can't be null,
 +        // we only check whether delimiter and quote use the same character
 +        if (quoteValue.charAt(0) == delimiter) {
 +            throw new AsterixException(
 +                    "Quote '" + quoteValue + "' cannot be used with the delimiter '" + delimiter + "'. ");
 +        }
 +
 +        return quoteValue.charAt(0);
 +    }
 +
 +    // Get the header flag
 +    public static boolean getHasHeader(Map<String, String> configuration) {
 +        return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_HEADER));
 +    }
 +
 +    public static void validateParameters(Map<String, String> configuration) throws AsterixException {
 +        String reader = configuration.get(ExternalDataConstants.KEY_READER);
 +        if (reader == null) {
 +            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER + " must be specified.");
 +        }
-         String parser = configuration.get(ExternalDataConstants.KEY_PARSER);
++        String parser = configuration.get(ExternalDataConstants.KEY_FORMAT);
 +        if (parser == null) {
-             throw new AsterixException("The parameter " + ExternalDataConstants.KEY_PARSER + " must be specified.");
++            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_FORMAT + " must be specified.");
 +        }
 +    }
 +
 +    public static DataSourceType getDataSourceType(Map<String, String> configuration) {
 +        String reader = configuration.get(ExternalDataConstants.KEY_READER);
 +        if ((reader != null) && reader.equals(ExternalDataConstants.READER_STREAM)) {
 +            return DataSourceType.STREAM;
 +        } else {
 +            return DataSourceType.RECORDS;
 +        }
 +    }
 +
 +    public static boolean isExternal(String aString) {
 +        return ((aString != null) && aString.contains(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR)
 +                && (aString.trim().length() > 1));
 +    }
 +
 +    public static ClassLoader getClassLoader(String dataverse, String library) {
 +        return ExternalLibraryManager.getLibraryClassLoader(dataverse, library);
 +    }
 +
 +    public static String getLibraryName(String aString) {
 +        return aString.trim().split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[0];
 +    }
 +
 +    public static String getExternalClassName(String aString) {
 +        return aString.trim().split(FeedConstants.NamingConstants.LIBRARY_NAME_SEPARATOR)[1];
 +    }
 +
 +    public static IInputStreamFactory createExternalInputStreamFactory(String dataverse, String stream)
 +            throws AsterixException {
 +        try {
 +            String libraryName = getLibraryName(stream);
 +            String className = getExternalClassName(stream);
 +            ClassLoader classLoader = getClassLoader(dataverse, libraryName);
 +            return ((IInputStreamFactory) (classLoader.loadClass(className).newInstance()));
 +        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
 +            throw new AsterixException("Failed to create stream factory", e);
 +        }
 +    }
 +
 +    public static String getDataverse(Map<String, String> configuration) {
 +        return configuration.get(ExternalDataConstants.KEY_DATAVERSE);
 +    }
 +
 +    public static String getRecordFormat(Map<String, String> configuration) {
 +        String parserFormat = configuration.get(ExternalDataConstants.KEY_DATA_PARSER);
 +        return parserFormat != null ? parserFormat : configuration.get(ExternalDataConstants.KEY_FORMAT);
 +    }
 +
 +    public static void setRecordFormat(Map<String, String> configuration, String format) {
 +        if (!configuration.containsKey(ExternalDataConstants.KEY_DATA_PARSER)) {
 +            configuration.put(ExternalDataConstants.KEY_DATA_PARSER, format);
 +        }
 +        if (!configuration.containsKey(ExternalDataConstants.KEY_FORMAT)) {
 +            configuration.put(ExternalDataConstants.KEY_FORMAT, format);
 +        }
 +    }
 +
 +    private static Map<ATypeTag, IValueParserFactory> valueParserFactoryMap = initializeValueParserFactoryMap();
 +
 +    private static Map<ATypeTag, IValueParserFactory> initializeValueParserFactoryMap() {
 +        Map<ATypeTag, IValueParserFactory> m = new HashMap<ATypeTag, IValueParserFactory>();
 +        m.put(ATypeTag.INT32, IntegerParserFactory.INSTANCE);
 +        m.put(ATypeTag.FLOAT, FloatParserFactory.INSTANCE);
 +        m.put(ATypeTag.DOUBLE, DoubleParserFactory.INSTANCE);
 +        m.put(ATypeTag.INT64, LongParserFactory.INSTANCE);
 +        m.put(ATypeTag.STRING, UTF8StringParserFactory.INSTANCE);
 +        return m;
 +    }
 +
 +    public static IValueParserFactory[] getValueParserFactories(ARecordType recordType) {
 +        int n = recordType.getFieldTypes().length;
 +        IValueParserFactory[] fieldParserFactories = new IValueParserFactory[n];
 +        for (int i = 0; i < n; i++) {
 +            ATypeTag tag = null;
 +            if (recordType.getFieldTypes()[i].getTypeTag() == ATypeTag.UNION) {
 +                List<IAType> unionTypes = ((AUnionType) recordType.getFieldTypes()[i]).getUnionList();
 +                if ((unionTypes.size() != 2) && (unionTypes.get(0).getTypeTag() != ATypeTag.NULL)) {
 +                    throw new NotImplementedException("Non-optional UNION type is not supported.");
 +                }
 +                tag = unionTypes.get(1).getTypeTag();
 +            } else {
 +                tag = recordType.getFieldTypes()[i].getTypeTag();
 +            }
 +            if (tag == null) {
 +                throw new NotImplementedException("Failed to get the type information for field " + i + ".");
 +            }
 +            fieldParserFactories[i] = getParserFactory(tag);
 +        }
 +        return fieldParserFactories;
 +    }
 +
 +    public static IValueParserFactory getParserFactory(ATypeTag tag) {
 +        IValueParserFactory vpf = valueParserFactoryMap.get(tag);
 +        if (vpf == null) {
 +            throw new NotImplementedException("No value parser factory for fields of type " + tag);
 +        }
 +        return vpf;
 +    }
 +
 +    public static String getRecordReaderStreamName(Map<String, String> configuration) {
 +        return configuration.get(ExternalDataConstants.KEY_READER_STREAM);
 +    }
 +
 +    public static boolean hasHeader(Map<String, String> configuration) {
 +        String value = configuration.get(ExternalDataConstants.KEY_HEADER);
 +        if (value != null) {
 +            return Boolean.valueOf(value);
 +        }
 +        return false;
 +    }
 +
-     public static boolean isPull(Map<String, String> configuration) {
-         String pull = configuration.get(ExternalDataConstants.KEY_PULL);
-         if (pull == null) {
-             return false;
-         }
-         return Boolean.parseBoolean(pull);
-     }
- 
-     public static boolean isPush(Map<String, String> configuration) {
-         String push = configuration.get(ExternalDataConstants.KEY_PUSH);
-         if (push == null) {
-             return false;
-         }
-         return Boolean.parseBoolean(push);
-     }
- 
 +    public static IRecordReaderFactory<?> createExternalRecordReaderFactory(Map<String, String> configuration)
 +            throws AsterixException {
 +        String readerFactory = configuration.get(ExternalDataConstants.KEY_READER_FACTORY);
 +        if (readerFactory == null) {
 +            throw new AsterixException("to use " + ExternalDataConstants.EXTERNAL + " reader, the parameter "
 +                    + ExternalDataConstants.KEY_READER_FACTORY + " must be specified.");
 +        }
 +        String[] libraryAndFactory = readerFactory.split(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR);
 +        if (libraryAndFactory.length != 2) {
 +            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER_FACTORY
 +                    + " must follow the format \"DataverseName.LibraryName#ReaderFactoryFullyQualifiedName\"");
 +        }
 +        String[] dataverseAndLibrary = libraryAndFactory[0].split(".");
 +        if (dataverseAndLibrary.length != 2) {
 +            throw new AsterixException("The parameter " + ExternalDataConstants.KEY_READER_FACTORY
 +                    + " must follow the format \"DataverseName.LibraryName#ReaderFactoryFullyQualifiedName\"");
 +        }
 +
 +        ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(dataverseAndLibrary[0],
 +                dataverseAndLibrary[1]);
 +        try {
 +            return (IRecordReaderFactory<?>) classLoader.loadClass(libraryAndFactory[1]).newInstance();
 +        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
 +            throw new AsterixException("Failed to create record reader factory", e);
 +        }
 +    }
 +
 +    public static IDataParserFactory createExternalParserFactory(String dataverse, String parserFactoryName)
 +            throws AsterixException {
 +        try {
 +            String library = parserFactoryName.substring(0,
 +                    parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR));
 +            ClassLoader classLoader = ExternalLibraryManager.getLibraryClassLoader(dataverse, library);
 +            return (IDataParserFactory) classLoader
 +                    .loadClass(parserFactoryName
 +                            .substring(parserFactoryName.indexOf(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR) + 1))
 +                    .newInstance();
 +        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
 +            throw new AsterixException("Failed to create an external parser factory", e);
 +        }
 +    }
 +
 +    public static boolean isFeed(Map<String, String> configuration) {
 +        if (!configuration.containsKey(ExternalDataConstants.KEY_IS_FEED)) {
 +            return false;
 +        } else {
 +            return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_IS_FEED));
 +        }
 +    }
 +
 +    public static void prepareFeed(Map<String, String> configuration, String dataverseName, String feedName) {
 +        if (!configuration.containsKey(ExternalDataConstants.KEY_IS_FEED)) {
 +            configuration.put(ExternalDataConstants.KEY_IS_FEED, ExternalDataConstants.TRUE);
 +        }
 +        configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataverseName);
 +        configuration.put(ExternalDataConstants.KEY_FEED_NAME, feedName);
 +    }
 +
 +    public static boolean keepDataSourceOpen(Map<String, String> configuration) {
 +        if (!configuration.containsKey(ExternalDataConstants.KEY_WAIT_FOR_DATA)) {
 +            return true;
 +        }
 +        return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_WAIT_FOR_DATA));
 +    }
 +
 +    public static String getFeedName(Map<String, String> configuration) {
 +        return configuration.get(ExternalDataConstants.KEY_FEED_NAME);
 +    }
 +
 +    public static int getQueueSize(Map<String, String> configuration) {
 +        return configuration.containsKey(ExternalDataConstants.KEY_QUEUE_SIZE)
 +                ? Integer.parseInt(configuration.get(ExternalDataConstants.KEY_QUEUE_SIZE))
 +                : ExternalDataConstants.DEFAULT_QUEUE_SIZE;
 +    }
 +
 +    public static boolean isRecordWithMeta(Map<String, String> configuration) {
 +        return configuration.containsKey(ExternalDataConstants.KEY_META_TYPE_NAME);
 +    }
 +
 +    public static void setRecordWithMeta(Map<String, String> configuration, String booleanString) {
 +        configuration.put(ExternalDataConstants.FORMAT_RECORD_WITH_METADATA, booleanString);
 +    }
 +
 +    public static boolean isChangeFeed(Map<String, String> configuration) {
 +        return Boolean.parseBoolean(configuration.get(ExternalDataConstants.KEY_IS_CHANGE_FEED));
 +    }
 +
 +    public static int getNumberOfKeys(Map<String, String> configuration) throws AsterixException {
 +        String keyIndexes = configuration.get(ExternalDataConstants.KEY_KEY_INDEXES);
 +        if (keyIndexes == null) {
 +            throw new AsterixException(
 +                    "A change feed must have the parameter " + ExternalDataConstants.KEY_KEY_INDEXES);
 +        }
 +        return keyIndexes.split(",").length;
 +    }
 +
 +    public static void setNumberOfKeys(Map<String, String> configuration, int value) {
 +        configuration.put(ExternalDataConstants.KEY_KEY_SIZE, String.valueOf(value));
 +    }
 +
 +    public static void setChangeFeed(Map<String, String> configuration, String booleanString) {
 +        configuration.put(ExternalDataConstants.KEY_IS_CHANGE_FEED, booleanString);
 +    }
 +
 +    public static int[] getPKIndexes(Map<String, String> configuration) {
 +        String keyIndexes = configuration.get(ExternalDataConstants.KEY_KEY_INDEXES);
 +        String[] stringIndexes = keyIndexes.split(",");
 +        int[] intIndexes = new int[stringIndexes.length];
 +        for (int i = 0; i < stringIndexes.length; i++) {
 +            intIndexes[i] = Integer.parseInt(stringIndexes[i]);
 +        }
 +        return intIndexes;
 +    }
 +
 +    public static int[] getPKSourceIndicators(Map<String, String> configuration) {
 +        String keyIndicators = configuration.get(ExternalDataConstants.KEY_KEY_INDICATORS);
 +        String[] stringIndicators = keyIndicators.split(",");
 +        int[] intIndicators = new int[stringIndicators.length];
 +        for (int i = 0; i < stringIndicators.length; i++) {
 +            intIndicators[i] = Integer.parseInt(stringIndicators[i]);
 +        }
 +        return intIndicators;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
index fc15d3c,0000000..5bb8ec3
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
@@@ -1,172 -1,0 +1,180 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.util;
 +
 +import java.io.BufferedReader;
 +import java.io.BufferedWriter;
 +import java.io.File;
 +import java.io.IOException;
 +import java.nio.charset.StandardCharsets;
 +import java.nio.file.Files;
 +import java.nio.file.Path;
 +import java.nio.file.Paths;
 +import java.nio.file.StandardOpenOption;
 +import java.util.TreeSet;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +
 +public class FeedLogManager {
 +
 +    public enum LogEntryType {
 +        START, // partition start
 +        END, // partition end
 +        COMMIT, // a record commit within a partition
-         SNAPSHOT // an identifier that partitions with identifiers before this one should be
-                  // ignored
++        SNAPSHOT // an identifier that partitions with identifiers before this one should be ignored
 +    }
 +
 +    public static final String PROGRESS_LOG_FILE_NAME = "progress.log";
 +    public static final String ERROR_LOG_FILE_NAME = "error.log";
 +    public static final String BAD_RECORDS_FILE_NAME = "failed_record.log";
 +    public static final String START_PREFIX = "s:";
 +    public static final String END_PREFIX = "e:";
 +    public static final int PREFIX_SIZE = 2;
 +    private String currentPartition;
 +    private final TreeSet<String> completed;
 +    private final Path dir;
 +    private BufferedWriter progressLogger;
 +    private BufferedWriter errorLogger;
 +    private BufferedWriter recordLogger;
 +    private final StringBuilder stringBuilder = new StringBuilder();
++    private int count = 0;
 +
 +    public FeedLogManager(File file) throws HyracksDataException {
 +        try {
 +            this.dir = file.toPath();
 +            this.completed = new TreeSet<String>();
 +            if (!exists()) {
 +                create();
 +            }
 +            open();
 +        } catch (IOException e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
-     public void endPartition() throws IOException {
++    public synchronized void touch() {
++        count++;
++    }
++
++    public synchronized void endPartition() throws IOException {
 +        logProgress(END_PREFIX + currentPartition);
 +        completed.add(currentPartition);
 +    }
 +
-     public void endPartition(String partition) throws IOException {
++    public synchronized void endPartition(String partition) throws IOException {
 +        currentPartition = partition;
 +        logProgress(END_PREFIX + currentPartition);
 +        completed.add(currentPartition);
 +    }
 +
-     public void startPartition(String partition) throws IOException {
++    public synchronized void startPartition(String partition) throws IOException {
 +        currentPartition = partition;
 +        logProgress(START_PREFIX + currentPartition);
 +    }
 +
 +    public boolean exists() {
 +        return Files.exists(dir);
 +    }
 +
-     public void open() throws IOException {
++    public synchronized void open() throws IOException {
 +        // read content of logs.
 +        BufferedReader reader = Files.newBufferedReader(
 +                Paths.get(dir.toAbsolutePath().toString() + File.separator + PROGRESS_LOG_FILE_NAME));
 +        String log = reader.readLine();
 +        while (log != null) {
 +            if (log.startsWith(END_PREFIX)) {
 +                completed.add(getSplitId(log));
 +            }
 +            log = reader.readLine();
 +        }
 +        reader.close();
 +
 +        progressLogger = Files.newBufferedWriter(
 +                Paths.get(dir.toAbsolutePath().toString() + File.separator + PROGRESS_LOG_FILE_NAME),
 +                StandardCharsets.UTF_8, StandardOpenOption.APPEND);
 +        errorLogger = Files.newBufferedWriter(
 +                Paths.get(dir.toAbsolutePath().toString() + File.separator + ERROR_LOG_FILE_NAME),
 +                StandardCharsets.UTF_8, StandardOpenOption.APPEND);
 +        recordLogger = Files.newBufferedWriter(
 +                Paths.get(dir.toAbsolutePath().toString() + File.separator + BAD_RECORDS_FILE_NAME),
 +                StandardCharsets.UTF_8, StandardOpenOption.APPEND);
 +    }
 +
-     public void close() throws IOException {
++    public synchronized void close() throws IOException {
++        count--;
++        if (count > 0) {
++            return;
++        }
 +        progressLogger.close();
 +        errorLogger.close();
 +        recordLogger.close();
 +    }
 +
-     public boolean create() throws IOException {
++    public synchronized boolean create() throws IOException {
 +        File f = dir.toFile();
 +        f.mkdirs();
 +        new File(f, PROGRESS_LOG_FILE_NAME).createNewFile();
 +        new File(f, ERROR_LOG_FILE_NAME).createNewFile();
 +        new File(f, BAD_RECORDS_FILE_NAME).createNewFile();
 +        return true;
 +    }
 +
-     public boolean destroy() throws IOException {
++    public synchronized boolean destroy() throws IOException {
 +        File f = dir.toFile();
 +        FileUtils.deleteDirectory(f);
 +        return true;
 +    }
 +
-     public void logProgress(String log) throws IOException {
++    public synchronized void logProgress(String log) throws IOException {
 +        stringBuilder.setLength(0);
 +        stringBuilder.append(log);
 +        stringBuilder.append(ExternalDataConstants.LF);
 +        progressLogger.write(stringBuilder.toString());
 +        progressLogger.flush();
 +    }
 +
-     public void logError(String error, Throwable th) throws IOException {
++    public synchronized void logError(String error, Throwable th) throws IOException {
 +        stringBuilder.setLength(0);
 +        stringBuilder.append(error);
 +        stringBuilder.append(ExternalDataConstants.LF);
 +        stringBuilder.append(th.toString());
 +        stringBuilder.append(ExternalDataConstants.LF);
 +        errorLogger.write(stringBuilder.toString());
 +        errorLogger.flush();
 +    }
 +
-     public void logRecord(String record, String errorMessage) throws IOException {
++    public synchronized void logRecord(String record, String errorMessage) throws IOException {
 +        stringBuilder.setLength(0);
 +        stringBuilder.append(record);
 +        stringBuilder.append(ExternalDataConstants.LF);
 +        stringBuilder.append(errorMessage);
 +        stringBuilder.append(ExternalDataConstants.LF);
 +        recordLogger.write(stringBuilder.toString());
 +        recordLogger.flush();
 +    }
 +
 +    public static String getSplitId(String log) {
 +        return log.substring(PREFIX_SIZE);
 +    }
 +
-     public boolean isSplitRead(String split) {
++    public synchronized boolean isSplitRead(String split) {
 +        return completed.contains(split);
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
----------------------------------------------------------------------
diff --cc asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
index 5ab41af,0000000..502a432
mode 100644,000000..100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
@@@ -1,123 -1,0 +1,111 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.asterix.external.util;
 +
 +import java.io.File;
 +import java.nio.ByteBuffer;
 +import java.util.ArrayList;
 +import java.util.List;
 +import java.util.Map;
 +
 +import org.apache.asterix.common.cluster.ClusterPartition;
 +import org.apache.asterix.common.exceptions.AsterixException;
 +import org.apache.asterix.common.utils.StoragePathUtil;
 +import org.apache.asterix.om.util.AsterixClusterProperties;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 +import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint.PartitionConstraintType;
 +import org.apache.hyracks.api.comm.FrameHelper;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.io.FileReference;
 +import org.apache.hyracks.api.io.IIOManager;
 +import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 +import org.apache.hyracks.dataflow.common.util.IntSerDeUtils;
 +import org.apache.hyracks.dataflow.std.file.FileSplit;
 +
 +public class FeedUtils {
 +    private static String prepareDataverseFeedName(String dataverseName, String feedName) {
 +        return dataverseName + File.separator + feedName;
 +    }
 +
-     public static FileSplit splitsForAdapter(String dataverseName, String feedName, int partition,
-             ClusterPartition[] nodePartitions) {
++    public static FileSplit splitsForAdapter(String dataverseName, String feedName, String nodeName,
++            ClusterPartition partition) {
 +        File relPathFile = new File(prepareDataverseFeedName(dataverseName, feedName));
 +        String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
-         ClusterPartition nodePartition = nodePartitions[0];
 +        String storagePartitionPath = StoragePathUtil.prepareStoragePartitionPath(storageDirName,
-                 nodePartition.getPartitionId());
-         // format: 'storage dir name'/partition_#/dataverse/feed/adapter_#
-         File f = new File(storagePartitionPath + File.separator + relPathFile + File.separator
-                 + StoragePathUtil.ADAPTER_INSTANCE_PREFIX + partition);
-         return StoragePathUtil.getFileSplitForClusterPartition(nodePartition, f);
++                partition.getPartitionId());
++        // Note: feed adapter instances in a single node share the feed logger
++        // format: 'storage dir name'/partition_#/dataverse/feed/node
++        File f = new File(storagePartitionPath + File.separator + relPathFile + File.separator + nodeName);
++        return StoragePathUtil.getFileSplitForClusterPartition(partition, f);
 +    }
 +
 +    public static FileSplit[] splitsForAdapter(String dataverseName, String feedName,
 +            AlgebricksPartitionConstraint partitionConstraints) throws AsterixException {
 +        if (partitionConstraints.getPartitionConstraintType() == PartitionConstraintType.COUNT) {
 +            throw new AsterixException("Can't create file splits for adapter with count partitioning constraints");
 +        }
-         File relPathFile = new File(prepareDataverseFeedName(dataverseName, feedName));
-         String[] locations = null;
-         locations = ((AlgebricksAbsolutePartitionConstraint) partitionConstraints).getLocations();
++        String[] locations = ((AlgebricksAbsolutePartitionConstraint) partitionConstraints).getLocations();
 +        List<FileSplit> splits = new ArrayList<FileSplit>();
-         String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
-         int i = 0;
 +        for (String nd : locations) {
-             // Always get the first partition
-             ClusterPartition nodePartition = AsterixClusterProperties.INSTANCE.getNodePartitions(nd)[0];
-             String storagePartitionPath = StoragePathUtil.prepareStoragePartitionPath(storageDirName,
-                     nodePartition.getPartitionId());
-             // format: 'storage dir name'/partition_#/dataverse/feed/adapter_#
-             File f = new File(storagePartitionPath + File.separator + relPathFile + File.separator
-                     + StoragePathUtil.ADAPTER_INSTANCE_PREFIX + i);
-             splits.add(StoragePathUtil.getFileSplitForClusterPartition(nodePartition, f));
-             i++;
++            splits.add(splitsForAdapter(dataverseName, feedName, nd,
++                    AsterixClusterProperties.INSTANCE.getNodePartitions(nd)[0]));
 +        }
 +        return splits.toArray(new FileSplit[] {});
 +    }
 +
 +    public static FileReference getAbsoluteFileRef(String relativePath, int ioDeviceId, IIOManager ioManager) {
 +        return ioManager.getAbsoluteFileRef(ioDeviceId, relativePath);
 +    }
 +
 +    public static FeedLogManager getFeedLogManager(IHyracksTaskContext ctx, int partition,
 +            FileSplit[] feedLogFileSplits) throws HyracksDataException {
 +        return new FeedLogManager(
 +                FeedUtils.getAbsoluteFileRef(feedLogFileSplits[partition].getLocalFile().getFile().getPath(),
 +                        feedLogFileSplits[partition].getIODeviceId(), ctx.getIOManager()).getFile());
 +    }
 +
 +    public static FeedLogManager getFeedLogManager(IHyracksTaskContext ctx, FileSplit feedLogFileSplit)
 +            throws HyracksDataException {
 +        return new FeedLogManager(FeedUtils.getAbsoluteFileRef(feedLogFileSplit.getLocalFile().getFile().getPath(),
 +                feedLogFileSplit.getIODeviceId(), ctx.getIOManager()).getFile());
 +    }
 +
 +    public static void processFeedMessage(ByteBuffer input, ByteBuffer message, FrameTupleAccessor fta) {
 +        // read the message and reduce the number of tuples
 +        fta.reset(input);
 +        int tc = fta.getTupleCount() - 1;
 +        int offset = fta.getTupleStartOffset(tc);
 +        int len = fta.getTupleLength(tc);
 +        message.clear();
 +        message.put(input.array(), offset, len);
 +        message.flip();
 +        IntSerDeUtils.putInt(input.array(), FrameHelper.getTupleCountOffset(input.capacity()), tc);
 +    }
 +
 +    public static int getNumOfFields(Map<String, String> configuration) {
 +        return 1;
 +    }
 +
 +    public static String getFeedMetaTypeName(Map<String, String> configuration) {
 +        return configuration.get(ExternalDataConstants.KEY_META_TYPE_NAME);
 +
 +    }
 +}



[22/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
index edc05f2,0000000..3385a20
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
@@@ -1,99 -1,0 +1,99 @@@
- { "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildI
 D: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatchTime": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "Reques
 tMemory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEI
 N_ResourceName)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/Cht
 cRun/dude14-surf-out-esr1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\nnull", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "sse
 ricksen@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Bloc
 kReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "Last
 RemoteHost": "slot1@e272.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer
  );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpt
 s_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockReadKby
 tes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastRemoteHost"
 : "slot1@e286.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\nnu
 ll", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkp
 ts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockReadK
 bytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastRemoteHos
 t": "slot1@e457.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\nn
 ull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "
 MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_Jo
 bStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tota
 lTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMo
 nitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_T
 otalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tota
 lTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites"
 : 0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "
 DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO
 ": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "
 Environment": "", "LastPublicClaimId": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Bloc
 kReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "Last
 RemoteHost": "slot1@e445.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTra
 nsfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Bloc
 kReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "Last
 RemoteHost": "slot1@e372.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTra
 nsfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine",
  "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_
 JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_To
 talTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_Self
 MonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19
 _TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_To
 talTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrite
 s": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.
 log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "Wan
 tRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40
 1 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts
 _RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockRe
 adKbytes": 26620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "Las
 tRemoteHost": "slot1@c064.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFile
 Transfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Bloc
 kReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "Last
 RemoteHost": "slot1@e423.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTra
 nsfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Block
 ReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastR
 emoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTrans
 fer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockRea
 dKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastRemo
 teHost": "slot1@e442.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfe
 r );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine",
  "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_
 JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_To
 talTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_Self
 MonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19
 _TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_To
 talTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrite
 s": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.
 log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "Wan
 tRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=16
 2 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133562, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_523030,ChtcWrapper333.out,AuditLog.333,simu_3_333.txt,harvest.log,333.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Block
 ReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25876, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/333/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25876.0d, "LastJobLeaseRenewal": 1446133562, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "333+333", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "LastR
 emoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 157.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/333/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133562, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTran
 sfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582154, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25876.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=333 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14242#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/333", "QDate": 1446105553, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582177.0#1446105581", "StatsLifetimeStarter": 25025, "JobStartDate": 1446108665, "SubmitEventNotes": "DAG Node: 145+145", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.57", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108665, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25026.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133691, "ResidentSetSize_RAW": 73308, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts
 _RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24770.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 120972, "BytesSent": 28290.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446133691, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 146, "SpooledOutputFiles": "CURLTIME_4179033,ChtcWrapper145.out,AuditLog.145,simu_3_145.txt,harvest.log,145.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108666, "ExitBySignal": false, "LastMatchTime": 1446108665, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 796, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Block
 ReadKbytes": 28476, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25026, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/145/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25026.0d, "LastJobLeaseRenewal": 1446133691, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "145+145", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "L
 astRemoteHost": "slot1@c038.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 217.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/145/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133691, "StreamErr": false, "RecentBlockReadKbytes": 1932, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "null( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasF
 ileTransfer );\nnull", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582177, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25026.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=145 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.57:49793>#1445322694#1541#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/145", "QDate": 1446105581, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
- { "GlobalJobId": "submit-3.chtc.wisc.edu#49582178.0#1446105581", "StatsLifetimeStarter": 24871, "JobStartDate": 1446108666, "SubmitEventNotes": "DAG Node: 154+154", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.158", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108666, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24874.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133540, "ResidentSetSize_RAW": 125792, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumC
 kpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24626.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30559.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133540, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1382128,ChtcWrapper154.out,AuditLog.154,simu_3_154.txt,harvest.log,154.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108668, "ExitBySignal": false, "LastMatchTime": 1446108666, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "null( ( ResidentSetSize + 1023 ) / 1024 );\nnull", "ExitCode": 0, "JobNotification": 0, "Bloc
 kReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24874, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/154/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24874.0d, "LastJobLeaseRenewal": 1446133540, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "154+154", "PeriodicRelease": "null( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );\nnull", "JobRunCount": 1, "Last
 RemoteHost": "slot1@e358.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 183.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/154/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133540, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscal

<TRUNCATED>


[38/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-external-data/src/test/resources/classad-with-temporals.classads
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-external-data/src/test/resources/classad-with-temporals.classads b/asterixdb/asterix-external-data/src/test/resources/classad-with-temporals.classads
new file mode 100644
index 0000000..e20be09
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/test/resources/classad-with-temporals.classads
@@ -0,0 +1,134 @@
+
+    [
+        Schedd = "submit-5.chtc.wisc.edu";
+        BlockWrites = 3;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1459300924;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 6.607100000000000E+04;
+        NiceUser = false;
+        BytesRecvd = 7.292000000000000E+03;
+        RequestMemory = 12288;
+        ResidentSetSize = 750000;
+        StreamOut = false;
+        SpooledOutputFiles = "job697_results.tar.gz";
+        Arguments = "";
+        OnExitRemove = true;
+        ImageSize_RAW = 607024;
+        RemoteWallClockTime = 6.629100000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 4;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 8.580547200000000E+07;
+        LastRejMatchReason = "no match found ";
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 4;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        BlockReadKbytes = 0;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/grandaduarte/mars/mhb1";
+        Cmd = "/home/grandaduarte/mars/mhb1/job697.sh";
+        CommittedSuspensionTime = 0;
+        RecentStatsLifetimeStarter = 1200;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        BufferSize = 524288;
+        JobCurrentStartTransferOutputDate = 1459367212;
+        RecentBlockWrites = 0;
+        CompletionDate = 1459367213;
+        LastMatchTime = 1459300922;
+        LastJobLeaseRenewal = 1459367213;
+        DAGManNodesLog = "/home/grandaduarte/mars/mhb1/./dagman.dag.nodes.log";
+        ClusterId = 16798777;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        ProcId = 0;
+        PeriodicHold = false;
+        CondorPlatform = "$CondorPlatform: x86_64_RedHat6 $";
+        JobFinishedHookDone = 1459367213;
+        In = "/dev/null";
+        DiskUsage = 7500000;
+        EncryptExecuteDirectory = false;
+        User = "grandaduarte@chtc.wisc.edu";
+        LeaveJobInQueue = false;
+        Requirements = ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || ( ( MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" ) && ( TARGET.OpSysMajorVer == MY.LinuxVer || TARGET.OpSysMajorVer == MY.LinuxVerAlt || TARGET.OpSysMajorVer == MY.WinVer ) ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.175";
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        EnteredCurrentStatus = 1459367213;
+        JobLeaseDuration = 2400;
+        QDate = 1459298672;
+        AccountingGroup = EngrPhysics_Wilson;
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        Environment = "";
+        LinuxVer = 6;
+        DAGNodeName = "_mars_MH1B1_661.inp";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorVersion = "$CondorVersion: 8.5.3 Mar 14 2016 BuildID: 358989 $";
+        UserLog = "/home/grandaduarte/mars/mhb1/job697.log";
+        JobCurrentStartDate = 1459300922;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        BufferBlockSize = 32768;
+        BlockWriteKbytes = 24;
+        ExitBySignal = false;
+        DAGManJobId = 16795779;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        CumulativeSuspensionTime = 0;
+        MyType = "Job";
+        Rank = 0.0;
+        JobNotification = 0;
+        Owner = "grandaduarte";
+        LinuxVerAlt = 6;
+        Err = "job697.err";
+        PeriodicRemove = false;
+        CommittedTime = 66291;
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1459300922;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-5.chtc.wisc.edu#16798777.0#1459298672";
+        RemoteSysCpu = 6.100000000000000E+01;
+        LastRejMatchTime = 1459300921;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 6.629100000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1_7@e375.chtc.wisc.edu";
+        TransferInput = "job697.sh";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = false;
+        WinVer = 601;
+        LastPublicClaimId = "<128.105.245.175:9618>#1457031418#19008#...";
+        NumCkpts_RAW = 0;
+        Out = "job697.out";
+        SubmitEventNotes = "DAG Node: _mars_MH1B1_661.inp";
+        CumulativeSlotTime = 6.629100000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 6625678;
+        RequestDisk = 20971520;
+        ResidentSetSize_RAW = 597536;
+        OrigMaxHosts = 1;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 66289;
+        ImageSize = 750000
+    ]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-external-data/src/test/resources/results/classad-with-temporals.adm b/asterixdb/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
new file mode 100644
index 0000000..3cd630b
--- /dev/null
+++ b/asterixdb/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
@@ -0,0 +1 @@
+{ "GlobalJobId": "submit-5.chtc.wisc.edu#16798777.0#1459298672", "Owner": "grandaduarte", "ClusterId": 16798777i32, "ProcId": 0i32, "RemoteWallClockTime": duration("PT18H24M51S"), "CompletionDate": datetime("2016-03-30T19:46:53.000Z"), "QDate": datetime("2016-03-30T00:44:32.000Z"), "JobCurrentStartDate": datetime("2016-03-30T01:22:02.000Z"), "JobStartDate": datetime("2016-03-30T01:22:02.000Z"), "JobCurrentStartExecutingDate": datetime("2016-03-30T01:22:04.000Z"), "StatsLifetimeStarter": 66289, "SubmitEventNotes": "DAG Node: _mars_MH1B1_661.inp", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.175", "OnExitRemove": true, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 750000, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 6625678, "EnteredCurrentStatus": 1459367213, "ResidentSe
 tSize_RAW": 597536, "RequestDisk": 20971520, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/grandaduarte/mars/mhb1/job697.sh", "CondorVersion": "$CondorVersion: 8.5.3 Mar 14 2016 BuildID: 358989 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "WinVer": 601, "RemoteUserCpu": 66071.0d, "BlockWrites": 3, "NiceUser": false, "Out": "job697.out", "ImageSize_RAW": 607024, "BytesSent": 8.5805472E7d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "job697.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1459367213, "ImageSize": 750000, "Schedd": "submit-5.chtc.wisc.edu", "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "job697_results.tar.gz", "BlockWriteKbytes": 24, "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "LastMatchTime": 1459300922, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 12288, "Nu
 mJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 16795779, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "AccountingGroup": "EngrPhysics_Wilson", "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 66291, "ExecutableSize_RAW": 4, "LastRejMatchReason": "no match found ", "LastSuspensionTime": 0, "UserLog": "/home/grandaduarte/mars/mhb1/job697.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 66291.0d, "LastJobLeaseRenewal": 1459367213, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 7292.0d, "CondorPlatform": "$CondorPlatform: x86_64_RedHat6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "_mars
 _MH1B1_661.inp", "PeriodicRelease": false, "JobRunCount": 1, "LastRemoteHost": "slot1_7@e375.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 4, "RemoteSysCpu": 61.0d, "TransferInput": "job697.sh", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/grandaduarte/mars/mhb1/./dagman.dag.nodes.log", "Requirements": "( MY.JobUniverse == 12 || MY.JobUniverse == 7 || ( ( MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" ) && ( TARGET.OpSysMajorVer == MY.LinuxVer || TARGET.OpSysMajorVer == MY.LinuxVerAlt || TARGET.OpSysMajorVer == MY.WinVer ) ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "LinuxVerAlt": 6, "DiskUsage": 7500000, "LinuxVer"
 : 6, "LastRejMatchTime": 1459300921, "JobLeaseDuration": 2400, "BufferSize": 524288, "IsCHTCSubmit": true, "JobCurrentStartTransferOutputDate": 1459367212, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 66291.0d, "Environment": "", "LastPublicClaimId": "<128.105.245.175:9618>#1457031418#19008#...", "Iwd": "/home/grandaduarte/mars/mhb1", "CurrentHosts": 0, "Arguments": "", "User": "grandaduarte@chtc.wisc.edu", "StreamOut": false }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql
new file mode 100644
index 0000000..cc46136
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql
new file mode 100644
index 0000000..d3317e4
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed with meta-data and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+
+drop dataverse KeyVerse if exists;
+create dataverse KeyVerse;
+use dataverse KeyVerse;
+
+create type DocumentType as open{
+};
+
+create type KVMetaType as open{
+"key":string,
+bucket:string,
+vbucket:int32,
+seq:int64,
+cas:int64,
+creationTime:int64,
+expiration:int32,
+flags:int32,
+revSeq:int64,
+lockTime:int32
+};
+
+create dataset KVStore(DocumentType) with meta(KVMetaType)primary key meta()."key";
+
+create feed KVChangeStream using adapter(
+    ("type-name"="DocumentType"),
+    ("meta-type-name"="KVMetaType"),
+    ("reader"="kv_test"),
+    ("parser"="record-with-metadata"),
+    ("format"="dcp"),
+    ("record-format"="json"),
+    ("change-feed"="true"),
+    ("key-indexes"="0"),
+    ("key-indicators"="1"),
+    ("num-of-records"="1000")
+);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql
new file mode 100644
index 0000000..7faf013
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed with meta-data and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+use dataverse KeyVerse;
+
+set wait-for-completion-feed "true";
+connect feed KVChangeStream to dataset KVStore;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql
new file mode 100644
index 0000000..9db20a9
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a change feed and test ingestion of records
+ * Expected Res : Success
+ * Date         : 24th Feb 2016
+ */
+use dataverse KeyVerse;
+
+count(
+    for $d in dataset KVStore
+    return $d
+);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm b/asterixdb/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm
new file mode 100644
index 0000000..c31da8b
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm
@@ -0,0 +1 @@
+804
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh b/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh b/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh b/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh
new file mode 100755
index 0000000..1271a2b
--- /dev/null
+++ b/asterixdb/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh
@@ -0,0 +1,2 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java
new file mode 100644
index 0000000..bb8c149
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java
@@ -0,0 +1,450 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.rewrites.visitor;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.base.Expression.Kind;
+import org.apache.asterix.lang.common.base.Literal;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.clause.LimitClause;
+import org.apache.asterix.lang.common.clause.OrderbyClause;
+import org.apache.asterix.lang.common.clause.WhereClause;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.common.expression.FieldAccessor;
+import org.apache.asterix.lang.common.expression.FieldBinding;
+import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
+import org.apache.asterix.lang.common.expression.IfExpr;
+import org.apache.asterix.lang.common.expression.IndexAccessor;
+import org.apache.asterix.lang.common.expression.ListConstructor;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.OperatorExpr;
+import org.apache.asterix.lang.common.expression.QuantifiedExpression;
+import org.apache.asterix.lang.common.expression.RecordConstructor;
+import org.apache.asterix.lang.common.expression.UnaryExpr;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.parser.ScopeChecker;
+import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
+import org.apache.asterix.lang.common.rewrites.VariableSubstitutionEnvironment;
+import org.apache.asterix.lang.common.statement.FunctionDecl;
+import org.apache.asterix.lang.common.statement.Query;
+import org.apache.asterix.lang.common.struct.QuantifiedPair;
+import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.HavingClause;
+import org.apache.asterix.lang.sqlpp.clause.JoinClause;
+import org.apache.asterix.lang.sqlpp.clause.NestClause;
+import org.apache.asterix.lang.sqlpp.clause.Projection;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectElement;
+import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
+import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableSubstitutionUtil;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
+
+public class InlineColumnAliasVisitor extends AbstractSqlppQueryExpressionVisitor<Void, Boolean> {
+
+    private final ScopeChecker scopeChecker = new ScopeChecker();
+    private final LangRewritingContext context;
+
    /**
     * Creates a visitor that inlines SELECT column aliases into later clauses
     * (group-by, order-by, having, ...) of a SQL++ query.
     *
     * @param context
     *            rewriting context; used to generate fresh variables for
     *            group-by keys that have no user-specified variable.
     */
    public InlineColumnAliasVisitor(LangRewritingContext context) {
        this.context = context;
    }
+
+    @Override
+    public Void visit(WhereClause whereClause, Boolean arg) throws AsterixException {
+        whereClause.getWhereExpr().accept(this, arg);
+        return null;
+    }
+
+    @Override
+    public Void visit(FromClause fromClause, Boolean arg) throws AsterixException {
+        for (FromTerm fromTerm : fromClause.getFromTerms()) {
+            fromTerm.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(FromTerm fromTerm, Boolean arg) throws AsterixException {
+        fromTerm.getLeftExpression().accept(this, arg);
+        // A from binding variable will override the alias to substitute.
+        scopeChecker.getCurrentScope().removeSymbolExpressionMapping(fromTerm.getLeftVariable());
+        if (fromTerm.hasPositionalVariable()) {
+            scopeChecker.getCurrentScope().removeSymbolExpressionMapping(fromTerm.getPositionalVariable());
+        }
+
+        for (AbstractBinaryCorrelateClause correlate : fromTerm.getCorrelateClauses()) {
+            correlate.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(JoinClause joinClause, Boolean arg) throws AsterixException {
+        joinClause.getRightExpression().accept(this, arg);
+        removeSubsutitions(joinClause);
+        joinClause.getConditionExpression().accept(this, arg);
+        return null;
+    }
+
    @Override
    public Void visit(NestClause nestClause, Boolean arg) throws AsterixException {
        nestClause.getRightExpression().accept(this, arg);
        // NOTE(review): unlike visit(JoinClause), the condition is visited
        // BEFORE the nest variables' alias mappings are removed -- confirm
        // whether this ordering difference is intentional.
        nestClause.getConditionExpression().accept(this, arg);
        removeSubsutitions(nestClause);
        return null;
    }
+
+    @Override
+    public Void visit(UnnestClause unnestClause, Boolean arg) throws AsterixException {
+        unnestClause.getRightExpression().accept(this, arg);
+        removeSubsutitions(unnestClause);
+        return null;
+    }
+
+    @Override
+    public Void visit(Projection projection, Boolean arg) throws AsterixException {
+        projection.getExpression().accept(this, arg);
+        VariableExpr columnAlias = new VariableExpr(
+                SqlppVariableUtil.toInternalVariableIdentifier(projection.getName()));
+        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
+        Expression gbyKey = (Expression) SqlppRewriteUtil.deepCopy(env.findSubstituion(columnAlias));
+        if (arg) {
+            scopeChecker.getCurrentScope().addSymbolExpressionMappingToScope(columnAlias, projection.getExpression());
+        } else {
+            if (gbyKey != null) {
+                projection.setExpression(gbyKey);
+            }
+        }
+        return null;
+    }
+
    @Override
    public Void visit(SelectBlock selectBlock, Boolean arg) throws AsterixException {
        // Pass 1 over the select clause: register column-alias -> expression
        // mappings (hence the hard-coded "true") before the rest of the block
        // is rewritten.
        selectBlock.getSelectClause().accept(this, true);

        if (selectBlock.hasFromClause()) {
            selectBlock.getFromClause().accept(this, arg);
        }
        if (selectBlock.hasLetClauses()) {
            for (LetClause letClause : selectBlock.getLetList()) {
                letClause.accept(this, arg);
            }
        }
        if (selectBlock.hasGroupbyClause()) {
            selectBlock.getGroupbyClause().accept(this, arg);
        }
        if (selectBlock.hasLetClausesAfterGroupby()) {
            // Post-group-by lets are always rewritten (hard-coded "true").
            for (LetClause letClauseAfterGby : selectBlock.getLetListAfterGroupby()) {
                letClauseAfterGby.accept(this, true);
            }
        }
        if (selectBlock.hasHavingClause()) {
            selectBlock.getHavingClause().accept(this, arg);
        }

        // Pass 2 over the select clause: overwrite projection expressions in
        // case the group-by clause re-mapped their aliases.
        selectBlock.getSelectClause().accept(this, false);
        return null;
    }
+
+    @Override
+    public Void visit(SelectClause selectClause, Boolean arg) throws AsterixException {
+        if (selectClause.selectElement()) {
+            selectClause.getSelectElement().accept(this, arg);
+        }
+        if (selectClause.selectRegular()) {
+            selectClause.getSelectRegular().accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectElement selectElement, Boolean arg) throws AsterixException {
+        Expression expr = selectElement.getExpression();
+        expr.accept(this, arg);
+        if (expr.getKind() == Kind.RECORD_CONSTRUCTOR_EXPRESSION) {
+            // To be consistent with SelectRegular.
+            mapForRecordConstructor(arg, (RecordConstructor) expr);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectRegular selectRegular, Boolean arg) throws AsterixException {
+        for (Projection projection : selectRegular.getProjections()) {
+            projection.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectSetOperation selectSetOperation, Boolean arg) throws AsterixException {
+        selectSetOperation.getLeftInput().accept(this, arg);
+        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
+            right.getSetOperationRightInput().accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(SelectExpression selectExpression, Boolean arg) throws AsterixException {
+        scopeChecker.createNewScope();
+
+        // Visits let bindings.
+        if (selectExpression.hasLetClauses()) {
+            for (LetClause lc : selectExpression.getLetList()) {
+                lc.accept(this, arg);
+            }
+        }
+
+        // Visits selectSetOperation.
+        selectExpression.getSelectSetOperation().accept(this, arg);
+
+        // Visits order by.
+        if (selectExpression.hasOrderby()) {
+            selectExpression.getOrderbyClause().accept(this, arg);
+        }
+
+        // Visits limit.
+        if (selectExpression.hasLimit()) {
+            selectExpression.getLimitClause().accept(this, arg);
+        }
+
+        // Exits the scope that were entered within this select expression
+        scopeChecker.removeCurrentScope();
+        return null;
+    }
+
+    @Override
+    public Void visit(LetClause letClause, Boolean rewrite) throws AsterixException {
+        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
+        if (rewrite) {
+            Expression newBindExpr = (Expression) SqlppVariableSubstitutionUtil
+                    .substituteVariableWithoutContext(letClause.getBindingExpr(), env);
+            letClause.setBindingExpr(newBindExpr);
+        }
+        letClause.getBindingExpr().accept(this, false);
+        // A let binding variable will override the alias to substitute.
+        scopeChecker.getCurrentScope().removeSymbolExpressionMapping(letClause.getVarExpr());
+        return null;
+    }
+
+    @Override
+    public Void visit(OrderbyClause oc, Boolean arg) throws AsterixException {
+        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
+        List<Expression> orderExprs = new ArrayList<Expression>();
+        for (Expression orderExpr : oc.getOrderbyList()) {
+            orderExprs.add((Expression) SqlppVariableSubstitutionUtil.substituteVariableWithoutContext(orderExpr, env));
+            orderExpr.accept(this, arg);
+        }
+        oc.setOrderbyList(orderExprs);
+        return null;
+    }
+
    @Override
    public Void visit(GroupbyClause gc, Boolean arg) throws AsterixException {
        // Column-alias substitutions visible at this point.
        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
        // Maps an original group-by variable expression to the group-by
        // variable that replaces it for clauses after the group-by.
        Map<VariableExpr, VariableExpr> oldGbyExprsToNewGbyVarMap = new HashMap<>();
        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
            Expression oldGbyExpr = gbyVarExpr.getExpr();
            // Inline column aliases inside the group-by key expression, then
            // keep rewriting inside the substituted copy before storing it.
            Expression newExpr = (Expression) SqlppVariableSubstitutionUtil.substituteVariableWithoutContext(oldGbyExpr,
                    env);
            newExpr.accept(this, arg);
            gbyVarExpr.setExpr(newExpr);
            if (gbyVarExpr.getVar() == null) {
                // Generate a variable for a group-by key that lacks one.
                gbyVarExpr.setVar(new VariableExpr(context.newVariable()));
            }
            if (oldGbyExpr.getKind() == Kind.VARIABLE_EXPRESSION) {
                VariableExpr oldGbyVarExpr = (VariableExpr) oldGbyExpr;
                if (env.findSubstituion(oldGbyVarExpr) != null) {
                    // Re-mapping that needs to be added.
                    oldGbyExprsToNewGbyVarMap.put(oldGbyVarExpr, gbyVarExpr.getVar());
                }
            }
        }
        for (Entry<VariableExpr, VariableExpr> entry : oldGbyExprsToNewGbyVarMap.entrySet()) {
            // The group-by key variable will override the alias to substitute.
            scopeChecker.getCurrentScope().removeSymbolExpressionMapping(entry.getKey());
            scopeChecker.getCurrentScope().addSymbolExpressionMappingToScope(entry.getKey(), entry.getValue());
        }
        return null;
    }
+
+    @Override
+    public Void visit(LimitClause limitClause, Boolean arg) throws AsterixException {
+        limitClause.getLimitExpr().accept(this, arg);
+        return null;
+    }
+
+    @Override
+    public Void visit(HavingClause havingClause, Boolean arg) throws AsterixException {
+        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
+        Expression newFilterExpr = (Expression) SqlppVariableSubstitutionUtil
+                .substituteVariableWithoutContext(havingClause.getFilterExpression(), env);
+        newFilterExpr.accept(this, arg);
+        havingClause.setFilterExpression(newFilterExpr);
+        return null;
+    }
+
+    @Override
+    public Void visit(Query q, Boolean arg) throws AsterixException {
+        q.getBody().accept(this, arg);
+        return null;
+    }
+
    /**
     * Visits a function declaration. A fresh scope is pushed before the
     * function body is traversed and popped afterwards, so variables bound
     * inside the function body do not leak into the enclosing scope.
     */
    @Override
    public Void visit(FunctionDecl fd, Boolean arg) throws AsterixException {
        scopeChecker.createNewScope();
        fd.getFuncBody().accept(this, arg);
        scopeChecker.removeCurrentScope();
        return null;
    }
+
    /** No-op: a literal has no sub-expressions or variables to process. */
    @Override
    public Void visit(LiteralExpr l, Boolean arg) throws AsterixException {
        return null;
    }
+
+    @Override
+    public Void visit(ListConstructor lc, Boolean arg) throws AsterixException {
+        for (Expression expr : lc.getExprList()) {
+            expr.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(RecordConstructor rc, Boolean rewrite) throws AsterixException {
+        for (FieldBinding binding : rc.getFbList()) {
+            binding.getLeftExpr().accept(this, false);
+            binding.getRightExpr().accept(this, false);
+        }
+        return null;
+    }
+
+    private void mapForRecordConstructor(Boolean initPhase, RecordConstructor rc) throws AsterixException {
+        for (FieldBinding binding : rc.getFbList()) {
+            Expression leftExpr = binding.getLeftExpr();
+            if (leftExpr.getKind() == Kind.LITERAL_EXPRESSION) {
+                LiteralExpr literalExpr = (LiteralExpr) leftExpr;
+                if (literalExpr.getValue().getLiteralType() == Literal.Type.STRING) {
+                    String fieldName = literalExpr.getValue().getStringValue();
+                    VariableExpr columnAlias = new VariableExpr(
+                            SqlppVariableUtil.toInternalVariableIdentifier(fieldName));
+                    VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope()
+                            .getVarSubstitutionEnvironment();
+                    if (initPhase) {
+                        scopeChecker.getCurrentScope().addSymbolExpressionMappingToScope(columnAlias,
+                                binding.getRightExpr());
+                    } else {
+                        Expression gbyKey = (Expression) SqlppRewriteUtil.deepCopy(env.findSubstituion(columnAlias));
+                        if (gbyKey != null) {
+                            binding.setRightExpr(gbyKey);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    @Override
+    public Void visit(OperatorExpr operatorExpr, Boolean arg) throws AsterixException {
+        for (Expression expr : operatorExpr.getExprList()) {
+            expr.accept(this, arg);
+        }
+        return null;
+    }
+
+    @Override
+    public Void visit(IfExpr ifExpr, Boolean arg) throws AsterixException {
+        ifExpr.getCondExpr().accept(this, arg);
+        ifExpr.getThenExpr().accept(this, arg);
+        ifExpr.getElseExpr().accept(this, arg);
+        return null;
+    }
+
+    @Override
+    public Void visit(QuantifiedExpression qe, Boolean arg) throws AsterixException {
+        for (QuantifiedPair pair : qe.getQuantifiedList()) {
+            pair.getExpr().accept(this, arg);
+        }
+        qe.getSatisfiesExpr().accept(this, arg);
+        return null;
+    }
+
+    @Override
+    public Void visit(CallExpr callExpr, Boolean arg) throws AsterixException {
+        for (Expression expr : callExpr.getExprList()) {
+            expr.accept(this, arg);
+        }
+        return null;
+    }
+
    /** No-op: a bare variable reference has no sub-expressions to visit. */
    @Override
    public Void visit(VariableExpr varExpr, Boolean arg) throws AsterixException {
        return null;
    }
+
+    @Override
+    public Void visit(UnaryExpr u, Boolean arg) throws AsterixException {
+        u.getExpr().accept(this, arg);
+        return null;
+    }
+
+    @Override
+    public Void visit(FieldAccessor fa, Boolean arg) throws AsterixException {
+        fa.getExpr().accept(this, arg);
+        return null;
+    }
+
+    @Override
+    public Void visit(IndexAccessor ia, Boolean arg) throws AsterixException {
+        ia.getExpr().accept(this, arg);
+        Expression indexExpr = ia.getExpr();
+        if (indexExpr != null) {
+            indexExpr.accept(this, arg);
+        }
+        return null;
+    }
+
+    private void removeSubsutitions(AbstractBinaryCorrelateClause unnestClause) {
+        scopeChecker.getCurrentScope().removeSymbolExpressionMapping(unnestClause.getRightVariable());
+        if (unnestClause.hasPositionalVariable()) {
+            scopeChecker.getCurrentScope().removeSymbolExpressionMapping(unnestClause.getPositionalVariable());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java
new file mode 100644
index 0000000..c7c7d11
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.rewrites.visitor;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppSimpleExpressionVisitor;
+
+public class SqlppBuiltinFunctionRewriteVisitor extends AbstractSqlppSimpleExpressionVisitor {
+
+    @Override
+    public Expression visit(CallExpr callExpr, Expression arg) throws AsterixException {
+        //TODO(buyingyi): rewrite SQL temporal functions
+        FunctionSignature functionSignature = callExpr.getFunctionSignature();
+        callExpr.setFunctionSignature(FunctionMapUtil.normalizeBuiltinFunctionSignature(functionSignature, true));
+        List<Expression> newExprList = new ArrayList<Expression>();
+        for (Expression expr : callExpr.getExprList()) {
+            newExprList.add(expr.accept(this, arg));
+        }
+        callExpr.setExprList(newExprList);
+        return callExpr;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java
new file mode 100644
index 0000000..ae629af
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.rewrites.visitor;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.base.ILangExpression;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.literal.IntegerLiteral;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.visitor.CheckSql92AggregateVisitor;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppSimpleExpressionVisitor;
+
+public class SqlppGlobalAggregationSugarVisitor extends AbstractSqlppSimpleExpressionVisitor {
+
+    @Override
+    public Expression visit(SelectBlock selectBlock, Expression arg) throws AsterixException {
+        SelectClause selectClause = selectBlock.getSelectClause();
+        if (!selectBlock.hasGroupbyClause() && selectBlock.hasFromClause()) {
+            boolean addImplicitGby = false;
+            if (selectClause.selectRegular()) {
+                addImplicitGby = isSql92Aggregate(selectClause.getSelectRegular(), selectBlock);
+            } else {
+                addImplicitGby = isSql92Aggregate(selectClause.getSelectElement(), selectBlock);
+            }
+            if (addImplicitGby) {
+                // Adds an implicit group-by clause for SQL-92 global aggregate.
+                List<GbyVariableExpressionPair> gbyPairList = new ArrayList<>();
+                gbyPairList.add(new GbyVariableExpressionPair(null, new LiteralExpr(new IntegerLiteral(1))));
+                List<GbyVariableExpressionPair> decorPairList = new ArrayList<>();
+                List<VariableExpr> withVarList = new ArrayList<>();
+                GroupbyClause gbyClause = new GroupbyClause(gbyPairList, decorPairList, withVarList, null, null, false,
+                        true);
+                selectBlock.setGroupbyClause(gbyClause);
+            }
+        }
+        return super.visit(selectBlock, arg);
+    }
+
+    private boolean isSql92Aggregate(ILangExpression expr, SelectBlock selectBlock) throws AsterixException {
+        CheckSql92AggregateVisitor visitor = new CheckSql92AggregateVisitor();
+        return expr.accept(visitor, selectBlock);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java
new file mode 100644
index 0000000..ae47264
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java
@@ -0,0 +1,123 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.rewrites.visitor;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.base.Expression.Kind;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.common.expression.FieldAccessor;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectElement;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
+import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
+import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableSubstitutionUtil;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
+
+/**
+ * An AST pre-processor to rewrite group-by sugar queries.
+ */
+public class SqlppGroupBySugarVisitor extends AbstractSqlppExpressionScopingVisitor {
+
+    private final Expression groupVar;
+    private final Collection<VariableExpr> targetVars;
+
+    public SqlppGroupBySugarVisitor(LangRewritingContext context, Expression groupVar,
+            Collection<VariableExpr> targetVars) {
+        super(context);
+        this.groupVar = groupVar;
+        this.targetVars = targetVars;
+    }
+
+    @Override
+    public Expression visit(CallExpr callExpr, Expression arg) throws AsterixException {
+        List<Expression> newExprList = new ArrayList<Expression>();
+        FunctionSignature signature = callExpr.getFunctionSignature();
+        boolean aggregate = FunctionMapUtil.isSql92AggregateFunction(signature)
+                || FunctionMapUtil.isCoreAggregateFunction(signature);
+        boolean rewritten = false;
+        for (Expression expr : callExpr.getExprList()) {
+            Expression newExpr = aggregate ? wrapAggregationArgument(expr) : expr;
+            rewritten |= newExpr != expr;
+            newExprList.add(newExpr.accept(this, arg));
+        }
+        if (rewritten) {
+            // Rewrites the SQL-92 function name to core functions.
+            callExpr.setFunctionSignature(FunctionMapUtil.sql92ToCoreAggregateFunction(signature));
+        }
+        callExpr.setExprList(newExprList);
+        return callExpr;
+    }
+
+    private Expression wrapAggregationArgument(Expression expr) throws AsterixException {
+        if (expr.getKind() == Kind.SELECT_EXPRESSION) {
+            return expr;
+        }
+        Set<VariableExpr> definedVars = scopeChecker.getCurrentScope().getLiveVariables();
+        Set<VariableExpr> vars = new HashSet<>(targetVars);
+        vars.remove(definedVars); // Exclude re-defined local variables.
+        Set<VariableExpr> freeVars = SqlppRewriteUtil.getFreeVariable(expr);
+        if (!vars.containsAll(freeVars)) {
+            return expr;
+        }
+
+        VariableExpr var = new VariableExpr(context.newVariable());
+        FromTerm fromTerm = new FromTerm(groupVar, var, null, null);
+        FromClause fromClause = new FromClause(Collections.singletonList(fromTerm));
+
+        // Select clause.
+        SelectElement selectElement = new SelectElement(expr);
+        SelectClause selectClause = new SelectClause(selectElement, null, false);
+
+        // Construct the select expression.
+        SelectBlock selectBlock = new SelectBlock(selectClause, fromClause, null, null, null, null, null);
+        SelectSetOperation selectSetOperation = new SelectSetOperation(new SetOperationInput(selectBlock, null), null);
+        SelectExpression selectExpression = new SelectExpression(null, selectSetOperation, null, null, false);
+        selectExpression.setSubquery(true);
+
+        // replace variable expressions with field access
+        Map<VariableExpr, Expression> varExprMap = new HashMap<>();
+        for (VariableExpr usedVar : freeVars) {
+            varExprMap.put(usedVar,
+                    new FieldAccessor(var, SqlppVariableUtil.toUserDefinedVariableName(usedVar.getVar())));
+        }
+        selectElement.setExpression(
+                (Expression) SqlppVariableSubstitutionUtil.substituteVariableWithoutContext(expr, varExprMap));
+        return selectExpression;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java
new file mode 100644
index 0000000..c9e7a6e
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.rewrites.visitor;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.clause.GroupbyClause;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.context.Scope;
+import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.lang.common.struct.VarIdentifier;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * A pre-processor that adds the group variable as well as its group field
+ * list into the AST. It will also invoke SQL group-by aggregation sugar rewritings.
+ */
public class SqlppGroupByVisitor extends AbstractSqlppExpressionScopingVisitor {

    public SqlppGroupByVisitor(LangRewritingContext context) {
        super(context);
    }

    @Override
    public Expression visit(SelectBlock selectBlock, Expression arg) throws AsterixException {
        // Traverses the select block in the order of "from", "let"s, "where",
        // "group by", "let"s, "having" and "select".
        if (selectBlock.hasFromClause()) {
            selectBlock.getFromClause().accept(this, arg);
        }
        if (selectBlock.hasLetClauses()) {
            List<LetClause> letList = selectBlock.getLetList();
            for (LetClause letClause : letList) {
                letClause.accept(this, arg);
            }
        }
        if (selectBlock.hasWhereClause()) {
            selectBlock.getWhereClause().accept(this, arg);
        }
        if (selectBlock.hasGroupbyClause()) {
            selectBlock.getGroupbyClause().accept(this, arg);
            // Variables re-bound by the group-by, excluding the group variable
            // itself; these are the candidates for group-variable sugar rewriting.
            Set<VariableExpr> withVarSet = new HashSet<>(selectBlock.getGroupbyClause().getWithVarList());
            withVarSet.remove(selectBlock.getGroupbyClause().getGroupVar());
            if (selectBlock.hasLetClausesAfterGroupby()) {
                List<LetClause> letListAfterGby = selectBlock.getLetListAfterGroupby();
                for (LetClause letClauseAfterGby : letListAfterGby) {
                    // Rewrites each let clause after the group-by.
                    SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
                            withVarSet, letClauseAfterGby, context);
                    letClauseAfterGby.accept(this, arg);
                }
            }
            if (selectBlock.hasHavingClause()) {
                // Rewrites the having clause.
                SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
                        withVarSet, selectBlock.getHavingClause(), context);
                selectBlock.getHavingClause().accept(this, arg);
            }
            // Rewrites the select clause.
            SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
                    withVarSet, selectBlock.getSelectClause(), context);

            // NOTE(review): assumes arg is the enclosing SelectExpression and
            // non-null whenever a group-by clause is present — NPE otherwise;
            // confirm all callers pass it.
            SelectExpression parentSelectExpression = (SelectExpression) arg;
            if (parentSelectExpression.hasOrderby()) {
                // Rewrites the order-by clause.
                SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
                        withVarSet, parentSelectExpression.getOrderbyClause(), context);
            }
            if (parentSelectExpression.hasLimit()) {
                // Rewrites the limit clause.
                SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
                        withVarSet, parentSelectExpression.getLimitClause(), context);
            }
        }
        selectBlock.getSelectClause().accept(this, arg);
        return null;
    }

    @Override
    public Expression visit(GroupbyClause gc, Expression arg) throws AsterixException {
        // Extend the current scope (without pushing yet) so group-by bindings
        // can be collected before the scope switch at the end of this method.
        Scope newScope = scopeChecker.extendCurrentScopeNoPush(true);
        // Puts all group-by variables into the symbol set of the new scope.
        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
            gbyVarExpr.setExpr(gbyVarExpr.getExpr().accept(this, arg));
            VariableExpr gbyVar = gbyVarExpr.getVar();
            if (gbyVar != null) {
                newScope.addNewVarSymbolToScope(gbyVarExpr.getVar().getVar());
            }
        }
        // Puts all live variables into withVarList.
        List<VariableExpr> withVarList = new ArrayList<VariableExpr>();
        Iterator<Identifier> varIterator = scopeChecker.getCurrentScope().liveSymbols();
        while (varIterator.hasNext()) {
            Identifier ident = varIterator.next();
            VariableExpr varExpr = new VariableExpr();
            if (ident instanceof VarIdentifier) {
                // Live variables are referenced (not re-declared) inside the group.
                varExpr.setIsNewVar(false);
                varExpr.setVar((VarIdentifier) ident);
                withVarList.add(varExpr);
                newScope.addNewVarSymbolToScope((VarIdentifier) ident);
            }
        }

        // Sets the field list for the group variable.
        List<Pair<Expression, Identifier>> groupFieldList = new ArrayList<>();
        if (!gc.hasGroupFieldList()) {
            // Default field list: one field per live variable, named after its
            // user-visible (un-prefixed) variable name.
            for (VariableExpr varExpr : withVarList) {
                Pair<Expression, Identifier> varIdPair = new Pair<>(new VariableExpr(varExpr.getVar()),
                        SqlppVariableUtil.toUserDefinedVariableName(varExpr.getVar()));
                groupFieldList.add(varIdPair);
            }
            gc.setGroupFieldList(groupFieldList);
        } else {
            // Check the scopes of group field variables.
            for (Pair<Expression, Identifier> groupField : gc.getGroupFieldList()) {
                Expression newVar = groupField.first.accept(this, arg);
                groupFieldList.add(new Pair<>(newVar, groupField.second));
            }
        }
        // NOTE(review): redundant in the !hasGroupFieldList branch, where the
        // same list was already installed above; harmless as written.
        gc.setGroupFieldList(groupFieldList);

        // Sets the group variable, generating a fresh one if absent.
        if (!gc.hasGroupVar()) {
            VariableExpr groupVar = new VariableExpr(context.newVariable());
            gc.setGroupVar(groupVar);
        }
        newScope.addNewVarSymbolToScope(gc.getGroupVar().getVar());

        // Adds the group variable into the "with" (i.e., re-binding) variable list.
        VariableExpr gbyVarRef = new VariableExpr(gc.getGroupVar().getVar());
        gbyVarRef.setIsNewVar(false);
        withVarList.add(gbyVarRef);
        gc.setWithVarList(withVarList);

        // Make the extended scope (with all group-by symbols) current.
        scopeChecker.replaceCurrentScope(newScope);
        return null;
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
new file mode 100644
index 0000000..e7832bb
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
@@ -0,0 +1,237 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.rewrites.visitor;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.base.IRewriterFactory;
+import org.apache.asterix.lang.common.clause.LetClause;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
+import org.apache.asterix.lang.common.statement.FunctionDecl;
+import org.apache.asterix.lang.common.visitor.AbstractInlineUdfsVisitor;
+import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
+import org.apache.asterix.lang.sqlpp.clause.FromClause;
+import org.apache.asterix.lang.sqlpp.clause.FromTerm;
+import org.apache.asterix.lang.sqlpp.clause.HavingClause;
+import org.apache.asterix.lang.sqlpp.clause.JoinClause;
+import org.apache.asterix.lang.sqlpp.clause.NestClause;
+import org.apache.asterix.lang.sqlpp.clause.Projection;
+import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
+import org.apache.asterix.lang.sqlpp.clause.SelectClause;
+import org.apache.asterix.lang.sqlpp.clause.SelectElement;
+import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
+import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
+import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
+import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
+import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableSubstitutionUtil;
+import org.apache.asterix.lang.sqlpp.visitor.SqlppCloneAndSubstituteVariablesVisitor;
+import org.apache.asterix.lang.sqlpp.visitor.base.ISqlppVisitor;
+import org.apache.asterix.metadata.declared.AqlMetadataProvider;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+
+/**
+ * Inlines calls to user-defined functions (both declared and stored ones) inside
+ * SQL++ AST nodes. Each visit method returns whether any UDF call was inlined
+ * within the visited node, so callers can iterate until a fixpoint is reached.
+ */
+public class SqlppInlineUdfsVisitor extends AbstractInlineUdfsVisitor
+        implements ISqlppVisitor<Boolean, List<FunctionDecl>> {
+
+    /**
+     * @param context,
+     *            manages ids of variables and guarantees uniqueness of variables.
+     * @param rewriterFactory,
+     *            a rewrite factory for rewriting user-defined functions.
+     * @param declaredFunctions,
+     *            a list of declared functions associated with the query.
+     * @param metadataProvider,
+     *            providing the definition of created (i.e., stored) user-defined functions.
+     */
+    public SqlppInlineUdfsVisitor(LangRewritingContext context, IRewriterFactory rewriterFactory,
+            List<FunctionDecl> declaredFunctions, AqlMetadataProvider metadataProvider) {
+        super(context, rewriterFactory, declaredFunctions, metadataProvider,
+                new SqlppCloneAndSubstituteVariablesVisitor(context));
+    }
+
+    @Override
+    protected Expression generateQueryExpression(List<LetClause> letClauses, Expression returnExpr)
+            throws AsterixException {
+        // Collapses a function body (let clauses + return expression) into a single
+        // expression by substituting each let-bound variable with its binding.
+        Map<VariableExpr, Expression> varExprMap = extractLetBindingVariableExpressionMappings(letClauses);
+        return (Expression) SqlppVariableSubstitutionUtil.substituteVariableWithoutContext(returnExpr, varExprMap);
+    }
+
+    @Override
+    public Boolean visit(FromClause fromClause, List<FunctionDecl> func) throws AsterixException {
+        boolean changed = false;
+        for (FromTerm fromTerm : fromClause.getFromTerms()) {
+            changed |= fromTerm.accept(this, func);
+        }
+        return changed;
+    }
+
+    @Override
+    public Boolean visit(FromTerm fromTerm, List<FunctionDecl> func) throws AsterixException {
+        // Inline in the left (source) expression, then in every correlate clause.
+        Pair<Boolean, Expression> p = inlineUdfsInExpr(fromTerm.getLeftExpression(), func);
+        fromTerm.setLeftExpression(p.second);
+        boolean changed = p.first;
+        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
+            changed |= correlateClause.accept(this, func);
+        }
+        return changed;
+    }
+
+    @Override
+    public Boolean visit(JoinClause joinClause, List<FunctionDecl> funcs) throws AsterixException {
+        // Both the right branch and the join condition may contain UDF calls.
+        Pair<Boolean, Expression> p1 = inlineUdfsInExpr(joinClause.getRightExpression(), funcs);
+        joinClause.setRightExpression(p1.second);
+        Pair<Boolean, Expression> p2 = inlineUdfsInExpr(joinClause.getConditionExpression(), funcs);
+        joinClause.setConditionExpression(p2.second);
+        return p1.first || p2.first;
+    }
+
+    @Override
+    public Boolean visit(NestClause nestClause, List<FunctionDecl> funcs) throws AsterixException {
+        // Both the right branch and the nest condition may contain UDF calls.
+        Pair<Boolean, Expression> p1 = inlineUdfsInExpr(nestClause.getRightExpression(), funcs);
+        nestClause.setRightExpression(p1.second);
+        Pair<Boolean, Expression> p2 = inlineUdfsInExpr(nestClause.getConditionExpression(), funcs);
+        nestClause.setConditionExpression(p2.second);
+        return p1.first || p2.first;
+    }
+
+    @Override
+    public Boolean visit(Projection projection, List<FunctionDecl> funcs) throws AsterixException {
+        Pair<Boolean, Expression> p = inlineUdfsInExpr(projection.getExpression(), funcs);
+        projection.setExpression(p.second);
+        return p.first;
+    }
+
+    @Override
+    public Boolean visit(SelectBlock selectBlock, List<FunctionDecl> funcs) throws AsterixException {
+        // Visit every optional sub-clause of the select block in syntactic order.
+        boolean changed = false;
+        if (selectBlock.hasFromClause()) {
+            changed |= selectBlock.getFromClause().accept(this, funcs);
+        }
+        if (selectBlock.hasLetClauses()) {
+            for (LetClause letClause : selectBlock.getLetList()) {
+                changed |= letClause.accept(this, funcs);
+            }
+        }
+        if (selectBlock.hasWhereClause()) {
+            changed |= selectBlock.getWhereClause().accept(this, funcs);
+        }
+        if (selectBlock.hasGroupbyClause()) {
+            changed |= selectBlock.getGroupbyClause().accept(this, funcs);
+        }
+        if (selectBlock.hasLetClausesAfterGroupby()) {
+            for (LetClause letClause : selectBlock.getLetListAfterGroupby()) {
+                changed |= letClause.accept(this, funcs);
+            }
+        }
+        if (selectBlock.hasHavingClause()) {
+            changed |= selectBlock.getHavingClause().accept(this, funcs);
+        }
+        changed |= selectBlock.getSelectClause().accept(this, funcs);
+        return changed;
+    }
+
+    @Override
+    public Boolean visit(SelectClause selectClause, List<FunctionDecl> funcs) throws AsterixException {
+        // A select clause holds exactly one of the two alternatives.
+        return selectClause.selectElement() ? selectClause.getSelectElement().accept(this, funcs)
+                : selectClause.getSelectRegular().accept(this, funcs);
+    }
+
+    @Override
+    public Boolean visit(SelectElement selectElement, List<FunctionDecl> funcs) throws AsterixException {
+        Pair<Boolean, Expression> p = inlineUdfsInExpr(selectElement.getExpression(), funcs);
+        selectElement.setExpression(p.second);
+        return p.first;
+    }
+
+    @Override
+    public Boolean visit(SelectRegular selectRegular, List<FunctionDecl> funcs) throws AsterixException {
+        boolean changed = false;
+        for (Projection projection : selectRegular.getProjections()) {
+            changed |= projection.accept(this, funcs);
+        }
+        return changed;
+    }
+
+    @Override
+    public Boolean visit(SelectSetOperation selectSetOperation, List<FunctionDecl> funcs) throws AsterixException {
+        // The left input plus every right-hand input of the set operation.
+        boolean changed = selectSetOperation.getLeftInput().accept(this, funcs);
+        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
+            changed |= right.getSetOperationRightInput().accept(this, funcs);
+        }
+        return changed;
+    }
+
+    @Override
+    public Boolean visit(SelectExpression selectExpression, List<FunctionDecl> funcs) throws AsterixException {
+        boolean changed = false;
+        if (selectExpression.hasLetClauses()) {
+            for (LetClause letClause : selectExpression.getLetList()) {
+                changed |= letClause.accept(this, funcs);
+            }
+        }
+        changed |= selectExpression.getSelectSetOperation().accept(this, funcs);
+        if (selectExpression.hasOrderby()) {
+            changed |= selectExpression.getOrderbyClause().accept(this, funcs);
+        }
+        if (selectExpression.hasLimit()) {
+            changed |= selectExpression.getLimitClause().accept(this, funcs);
+        }
+        return changed;
+    }
+
+    @Override
+    public Boolean visit(UnnestClause unnestClause, List<FunctionDecl> funcs) throws AsterixException {
+        Pair<Boolean, Expression> p = inlineUdfsInExpr(unnestClause.getRightExpression(), funcs);
+        unnestClause.setRightExpression(p.second);
+        return p.first;
+    }
+
+    @Override
+    public Boolean visit(HavingClause havingClause, List<FunctionDecl> funcs) throws AsterixException {
+        Pair<Boolean, Expression> p = inlineUdfsInExpr(havingClause.getFilterExpression(), funcs);
+        havingClause.setFilterExpression(p.second);
+        return p.first;
+    }
+
+    /**
+     * Builds the variable-to-expression substitution map for a chain of let clauses.
+     * Earlier bindings are inlined into later ones, so each mapped expression is
+     * already free of references to preceding let variables.
+     */
+    private Map<VariableExpr, Expression> extractLetBindingVariableExpressionMappings(List<LetClause> letClauses)
+            throws AsterixException {
+        Map<VariableExpr, Expression> varExprMap = new HashMap<>();
+        for (LetClause lc : letClauses) {
+            // inline let variables one by one iteratively.
+            lc.setBindingExpr((Expression) SqlppVariableSubstitutionUtil
+                    .substituteVariableWithoutContext(lc.getBindingExpr(), varExprMap));
+            varExprMap.put(lc.getVarExpr(), lc.getBindingExpr());
+        }
+        return varExprMap;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
----------------------------------------------------------------------
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
new file mode 100644
index 0000000..5ca2533
--- /dev/null
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.lang.sqlpp.rewrites.visitor;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.asterix.common.config.MetadataConstants;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.lang.common.base.Expression;
+import org.apache.asterix.lang.common.expression.CallExpr;
+import org.apache.asterix.lang.common.expression.LiteralExpr;
+import org.apache.asterix.lang.common.expression.VariableExpr;
+import org.apache.asterix.lang.common.literal.StringLiteral;
+import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.lang.common.struct.VarIdentifier;
+import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
+import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
+import org.apache.asterix.metadata.declared.AqlMetadataProvider;
+
+public class VariableCheckAndRewriteVisitor extends AbstractSqlppExpressionScopingVisitor {
+
+    protected final boolean overwrite;
+    protected final AqlMetadataProvider metadataProvider;
+
+    /**
+     * @param context,
+     *            manages ids of variables and guarantees uniqueness of variables.
+     * @param overwrite,
+     *            whether rewrite unbounded variables to dataset function calls.
+     *            This flag can only be true for rewriting a top-level query.
+     *            It should be false for rewriting the body expression of a user-defined function.
+     */
+    public VariableCheckAndRewriteVisitor(LangRewritingContext context, boolean overwrite,
+            AqlMetadataProvider metadataProvider) {
+        super(context);
+        this.overwrite = overwrite;
+        this.metadataProvider = metadataProvider;
+    }
+
+    @Override
+    public Expression visit(VariableExpr varExpr, Expression arg) throws AsterixException {
+        String varName = varExpr.getVar().getValue();
+        if (scopeChecker.isInForbiddenScopes(varName)) {
+            throw new AsterixException(
+                    "Inside limit clauses, it is disallowed to reference a variable having the same name as any variable bound in the same scope as the limit clause.");
+        }
+        if (rewriteNeeded(varExpr)) {
+            return datasetRewrite(varExpr);
+        } else {
+            return varExpr;
+        }
+    }
+
+    // Whether a rewrite is needed for a variable reference expression.
+    private boolean rewriteNeeded(VariableExpr varExpr) throws AsterixException {
+        String varName = varExpr.getVar().getValue();
+        Identifier ident = scopeChecker.lookupSymbol(varName);
+        if (ident != null) {
+            // Exists such an identifier
+            varExpr.setIsNewVar(false);
+            varExpr.setVar((VarIdentifier) ident);
+            return false;
+        } else {
+            // Meets a undefined variable
+            return true;
+        }
+    }
+
+    // Rewrites for global variable (e.g., dataset) references.
+    private Expression datasetRewrite(VariableExpr expr) throws AsterixException {
+        if (!overwrite) {
+            return expr;
+        }
+        String funcName = "dataset";
+        String dataverse = MetadataConstants.METADATA_DATAVERSE_NAME;
+        FunctionSignature signature = new FunctionSignature(dataverse, funcName, 1);
+        List<Expression> argList = new ArrayList<Expression>();
+        //Ignore the parser-generated prefix "$" for a dataset.
+        String dataset = SqlppVariableUtil.toUserDefinedVariableName(expr.getVar()).getValue();
+        argList.add(new LiteralExpr(new StringLiteral(dataset)));
+        return new CallExpr(signature, argList);
+    }
+}


[49/50] [abbrv] incubator-asterixdb git commit: Merge remote-tracking branch 'hyracks-local/master' into hyracks-merge2

Posted by im...@apache.org.
Merge remote-tracking branch 'hyracks-local/master' into hyracks-merge2


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/e928b6ac
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/e928b6ac
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/e928b6ac

Branch: refs/heads/master
Commit: e928b6acd869c8bccd907c1979ef98ceb9a7c418
Parents: d630d1a 3f84996
Author: Ian Maxon <im...@apache.org>
Authored: Wed Apr 6 19:57:26 2016 -0700
Committer: Ian Maxon <im...@apache.org>
Committed: Wed Apr 6 19:57:26 2016 -0700

----------------------------------------------------------------------
 .../control/nc/NodeControllerService.java       |  9 ++++-
 .../std/file/FileRemoveOperatorDescriptor.java  | 26 ++++++++++---
 .../lsm/btree/impls/ExternalBTreeOpContext.java |  5 ++-
 .../impls/ExternalBTreeWithBuddyOpContext.java  |  5 ++-
 .../am/lsm/btree/impls/LSMBTreeOpContext.java   |  5 ++-
 .../storage/am/lsm/common/api/ILSMIndex.java    |  6 +++
 .../common/api/ILSMIndexOperationContext.java   |  7 ++++
 .../am/lsm/common/impls/AbstractLSMIndex.java   | 25 ++++++------
 .../lsm/common/impls/ExternalIndexHarness.java  | 27 +++++++++----
 .../storage/am/lsm/common/impls/LSMHarness.java | 20 +++++++++-
 .../lsm/common/impls/LSMIndexSearchCursor.java  |  4 +-
 .../impls/LSMInvertedIndexOpContext.java        |  5 ++-
 .../lsm/rtree/impls/ExternalRTreeOpContext.java |  5 ++-
 .../am/lsm/rtree/impls/LSMRTreeOpContext.java   |  5 ++-
 .../impls/AbstractLSMIndexOperationContext.java | 41 ++++++++++++++++++++
 15 files changed, 155 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index 7b5758c,0000000..598d6db
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@@ -1,594 -1,0 +1,599 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.hyracks.control.nc;
 +
 +import java.io.File;
 +import java.lang.management.GarbageCollectorMXBean;
 +import java.lang.management.ManagementFactory;
 +import java.lang.management.MemoryMXBean;
 +import java.lang.management.MemoryUsage;
 +import java.lang.management.OperatingSystemMXBean;
 +import java.lang.management.RuntimeMXBean;
 +import java.lang.management.ThreadMXBean;
 +import java.lang.reflect.Field;
 +import java.net.InetSocketAddress;
 +import java.util.ArrayList;
 +import java.util.Hashtable;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.StringTokenizer;
 +import java.util.Timer;
 +import java.util.TimerTask;
 +import java.util.concurrent.ExecutorService;
 +import java.util.concurrent.Executors;
 +import java.util.concurrent.TimeUnit;
 +import java.util.logging.Level;
 +import java.util.logging.Logger;
 +
 +import org.apache.commons.lang3.mutable.Mutable;
 +import org.apache.commons.lang3.mutable.MutableObject;
 +import org.apache.hyracks.api.application.INCApplicationEntryPoint;
 +import org.apache.hyracks.api.client.NodeControllerInfo;
 +import org.apache.hyracks.api.comm.NetworkAddress;
 +import org.apache.hyracks.api.context.IHyracksRootContext;
 +import org.apache.hyracks.api.dataset.IDatasetPartitionManager;
 +import org.apache.hyracks.api.deployment.DeploymentId;
 +import org.apache.hyracks.api.io.IODeviceHandle;
 +import org.apache.hyracks.api.job.JobId;
 +import org.apache.hyracks.api.lifecycle.ILifeCycleComponentManager;
 +import org.apache.hyracks.api.lifecycle.LifeCycleComponentManager;
 +import org.apache.hyracks.api.service.IControllerService;
 +import org.apache.hyracks.control.common.base.IClusterController;
 +import org.apache.hyracks.control.common.context.ServerContext;
 +import org.apache.hyracks.control.common.controllers.NCConfig;
 +import org.apache.hyracks.control.common.controllers.NodeParameters;
 +import org.apache.hyracks.control.common.controllers.NodeRegistration;
 +import org.apache.hyracks.control.common.heartbeat.HeartbeatData;
 +import org.apache.hyracks.control.common.heartbeat.HeartbeatSchema;
 +import org.apache.hyracks.control.common.ipc.CCNCFunctions;
 +import org.apache.hyracks.control.common.ipc.CCNCFunctions.StateDumpRequestFunction;
 +import org.apache.hyracks.control.common.ipc.ClusterControllerRemoteProxy;
 +import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
 +import org.apache.hyracks.control.common.work.FutureValue;
 +import org.apache.hyracks.control.common.work.WorkQueue;
 +import org.apache.hyracks.control.nc.application.NCApplicationContext;
 +import org.apache.hyracks.control.nc.dataset.DatasetPartitionManager;
 +import org.apache.hyracks.control.nc.io.IOManager;
 +import org.apache.hyracks.control.nc.io.profiling.IIOCounter;
 +import org.apache.hyracks.control.nc.io.profiling.IOCounterFactory;
 +import org.apache.hyracks.control.nc.net.DatasetNetworkManager;
 +import org.apache.hyracks.control.nc.net.NetworkManager;
 +import org.apache.hyracks.control.nc.partitions.PartitionManager;
 +import org.apache.hyracks.control.nc.resources.memory.MemoryManager;
 +import org.apache.hyracks.control.nc.runtime.RootHyracksContext;
 +import org.apache.hyracks.control.nc.work.AbortTasksWork;
 +import org.apache.hyracks.control.nc.work.ApplicationMessageWork;
 +import org.apache.hyracks.control.nc.work.BuildJobProfilesWork;
 +import org.apache.hyracks.control.nc.work.CleanupJobletWork;
 +import org.apache.hyracks.control.nc.work.DeployBinaryWork;
 +import org.apache.hyracks.control.nc.work.ReportPartitionAvailabilityWork;
 +import org.apache.hyracks.control.nc.work.ShutdownWork;
 +import org.apache.hyracks.control.nc.work.StartTasksWork;
 +import org.apache.hyracks.control.nc.work.StateDumpWork;
 +import org.apache.hyracks.control.nc.work.UnDeployBinaryWork;
 +import org.apache.hyracks.ipc.api.IIPCHandle;
 +import org.apache.hyracks.ipc.api.IIPCI;
 +import org.apache.hyracks.ipc.api.IPCPerformanceCounters;
 +import org.apache.hyracks.ipc.impl.IPCSystem;
 +import org.apache.hyracks.net.protocols.muxdemux.MuxDemuxPerformanceCounters;
 +
 +public class NodeControllerService implements IControllerService {
 +    private static Logger LOGGER = Logger.getLogger(NodeControllerService.class.getName());
 +
 +    private static final double MEMORY_FUDGE_FACTOR = 0.8;
 +
 +    private NCConfig ncConfig;
 +
 +    private final String id;
 +
 +    private final IHyracksRootContext ctx;
 +
 +    private final IPCSystem ipc;
 +
 +    private final PartitionManager partitionManager;
 +
 +    private final NetworkManager netManager;
 +
 +    private IDatasetPartitionManager datasetPartitionManager;
 +
 +    private DatasetNetworkManager datasetNetworkManager;
 +
 +    private final WorkQueue queue;
 +
 +    private final Timer timer;
 +
 +    private boolean registrationPending;
 +
 +    private Exception registrationException;
 +
 +    private IClusterController ccs;
 +
 +    private final Map<JobId, Joblet> jobletMap;
 +
 +    private ExecutorService executor;
 +
 +    private NodeParameters nodeParameters;
 +
 +    private HeartbeatTask heartbeatTask;
 +
 +    private final ServerContext serverCtx;
 +
 +    private NCApplicationContext appCtx;
 +
 +    private INCApplicationEntryPoint ncAppEntryPoint;
 +
 +    private final ILifeCycleComponentManager lccm;
 +
 +    private final MemoryMXBean memoryMXBean;
 +
 +    private final List<GarbageCollectorMXBean> gcMXBeans;
 +
 +    private final ThreadMXBean threadMXBean;
 +
 +    private final RuntimeMXBean runtimeMXBean;
 +
 +    private final OperatingSystemMXBean osMXBean;
 +
 +    private final Mutable<FutureValue<Map<String, NodeControllerInfo>>> getNodeControllerInfosAcceptor;
 +
 +    private final MemoryManager memoryManager;
 +
 +    private boolean shuttedDown = false;
 +
 +    private IIOCounter ioCounter;
 +
 +    public NodeControllerService(NCConfig ncConfig) throws Exception {
 +        this.ncConfig = ncConfig;
 +        id = ncConfig.nodeId;
 +        NodeControllerIPCI ipci = new NodeControllerIPCI();
 +        ipc = new IPCSystem(new InetSocketAddress(ncConfig.clusterNetIPAddress, ncConfig.clusterNetPort), ipci,
 +                new CCNCFunctions.SerializerDeserializer());
 +
 +        this.ctx = new RootHyracksContext(this, new IOManager(getDevices(ncConfig.ioDevices)));
 +        if (id == null) {
 +            throw new Exception("id not set");
 +        }
 +        partitionManager = new PartitionManager(this);
 +        netManager = new NetworkManager(ncConfig.dataIPAddress, ncConfig.dataPort, partitionManager,
 +                ncConfig.nNetThreads, ncConfig.nNetBuffers, ncConfig.dataPublicIPAddress, ncConfig.dataPublicPort);
 +
 +        lccm = new LifeCycleComponentManager();
 +        queue = new WorkQueue(Thread.NORM_PRIORITY); // Reserves MAX_PRIORITY of the heartbeat thread.
 +        jobletMap = new Hashtable<JobId, Joblet>();
 +        timer = new Timer(true);
 +        serverCtx = new ServerContext(ServerContext.ServerType.NODE_CONTROLLER,
 +                new File(new File(NodeControllerService.class.getName()), id));
 +        memoryMXBean = ManagementFactory.getMemoryMXBean();
 +        gcMXBeans = ManagementFactory.getGarbageCollectorMXBeans();
 +        threadMXBean = ManagementFactory.getThreadMXBean();
 +        runtimeMXBean = ManagementFactory.getRuntimeMXBean();
 +        osMXBean = ManagementFactory.getOperatingSystemMXBean();
 +        registrationPending = true;
 +        getNodeControllerInfosAcceptor = new MutableObject<FutureValue<Map<String, NodeControllerInfo>>>();
 +        memoryManager = new MemoryManager((long) (memoryMXBean.getHeapMemoryUsage().getMax() * MEMORY_FUDGE_FACTOR));
 +        ioCounter = new IOCounterFactory().getIOCounter();
 +    }
 +
 +    public IHyracksRootContext getRootContext() {
 +        return ctx;
 +    }
 +
 +    public NCApplicationContext getApplicationContext() {
 +        return appCtx;
 +    }
 +
 +    public ILifeCycleComponentManager getLifeCycleComponentManager() {
 +        return lccm;
 +    }
 +
 +    private static List<IODeviceHandle> getDevices(String ioDevices) {
 +        List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
 +        StringTokenizer tok = new StringTokenizer(ioDevices, ",");
 +        while (tok.hasMoreElements()) {
 +            String devPath = tok.nextToken().trim();
 +            devices.add(new IODeviceHandle(new File(devPath), "."));
 +        }
 +        return devices;
 +    }
 +
 +    private synchronized void setNodeRegistrationResult(NodeParameters parameters, Exception exception) {
 +        this.nodeParameters = parameters;
 +        this.registrationException = exception;
 +        this.registrationPending = false;
 +        notifyAll();
 +    }
 +
 +    public Map<String, NodeControllerInfo> getNodeControllersInfo() throws Exception {
 +        FutureValue<Map<String, NodeControllerInfo>> fv = new FutureValue<Map<String, NodeControllerInfo>>();
 +        synchronized (getNodeControllerInfosAcceptor) {
 +            while (getNodeControllerInfosAcceptor.getValue() != null) {
 +                getNodeControllerInfosAcceptor.wait();
 +            }
 +            getNodeControllerInfosAcceptor.setValue(fv);
 +        }
 +        ccs.getNodeControllerInfos();
 +        return fv.get();
 +    }
 +
 +    private void setNodeControllersInfo(Map<String, NodeControllerInfo> ncInfos) {
 +        FutureValue<Map<String, NodeControllerInfo>> fv;
 +        synchronized (getNodeControllerInfosAcceptor) {
 +            fv = getNodeControllerInfosAcceptor.getValue();
 +            getNodeControllerInfosAcceptor.setValue(null);
 +            getNodeControllerInfosAcceptor.notifyAll();
 +        }
 +        fv.setValue(ncInfos);
 +    }
 +
 +    private void init() throws Exception {
 +        ctx.getIOManager().setExecutor(executor);
 +        datasetPartitionManager = new DatasetPartitionManager(this, executor, ncConfig.resultManagerMemory,
 +                ncConfig.resultTTL, ncConfig.resultSweepThreshold);
 +        datasetNetworkManager = new DatasetNetworkManager(ncConfig.resultIPAddress, ncConfig.resultPort,
 +                datasetPartitionManager, ncConfig.nNetThreads, ncConfig.nNetBuffers, ncConfig.resultPublicIPAddress,
 +                ncConfig.resultPublicPort);
 +    }
 +
 +    @Override
 +    public void start() throws Exception {
 +        LOGGER.log(Level.INFO, "Starting NodeControllerService");
 +        ipc.start();
 +        netManager.start();
 +
 +        startApplication();
 +        init();
 +
 +        datasetNetworkManager.start();
 +        IIPCHandle ccIPCHandle = ipc.getHandle(new InetSocketAddress(ncConfig.ccHost, ncConfig.ccPort), -1);
 +        this.ccs = new ClusterControllerRemoteProxy(ccIPCHandle);
 +        HeartbeatSchema.GarbageCollectorInfo[] gcInfos = new HeartbeatSchema.GarbageCollectorInfo[gcMXBeans.size()];
 +        for (int i = 0; i < gcInfos.length; ++i) {
 +            gcInfos[i] = new HeartbeatSchema.GarbageCollectorInfo(gcMXBeans.get(i).getName());
 +        }
 +        HeartbeatSchema hbSchema = new HeartbeatSchema(gcInfos);
 +        // Use "public" versions of network addresses and ports
 +        NetworkAddress datasetAddress = datasetNetworkManager.getPublicNetworkAddress();
 +        NetworkAddress netAddress = netManager.getPublicNetworkAddress();
 +        if (ncConfig.dataPublicIPAddress != null) {
 +            netAddress = new NetworkAddress(ncConfig.dataPublicIPAddress, ncConfig.dataPublicPort);
 +        }
 +        ccs.registerNode(new NodeRegistration(ipc.getSocketAddress(), id, ncConfig, netAddress, datasetAddress,
 +                osMXBean.getName(), osMXBean.getArch(), osMXBean.getVersion(), osMXBean.getAvailableProcessors(),
 +                runtimeMXBean.getVmName(), runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(),
 +                runtimeMXBean.getClassPath(), runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(),
 +                runtimeMXBean.getInputArguments(), runtimeMXBean.getSystemProperties(), hbSchema));
 +
 +        synchronized (this) {
 +            while (registrationPending) {
 +                wait();
 +            }
 +        }
 +        if (registrationException != null) {
 +            throw registrationException;
 +        }
 +        appCtx.setDistributedState(nodeParameters.getDistributedState());
 +
 +        queue.start();
 +
 +        heartbeatTask = new HeartbeatTask(ccs);
 +
 +        // Use reflection to set the priority of the timer thread.
 +        Field threadField = timer.getClass().getDeclaredField("thread");
 +        threadField.setAccessible(true);
 +        Thread timerThread = (Thread) threadField.get(timer); // The internal timer thread of the Timer object.
 +        timerThread.setPriority(Thread.MAX_PRIORITY);
 +        // Schedule heartbeat generator.
 +        timer.schedule(heartbeatTask, 0, nodeParameters.getHeartbeatPeriod());
 +
 +        if (nodeParameters.getProfileDumpPeriod() > 0) {
 +            // Schedule profile dump generator.
 +            timer.schedule(new ProfileDumpTask(ccs), 0, nodeParameters.getProfileDumpPeriod());
 +        }
 +
 +        LOGGER.log(Level.INFO, "Started NodeControllerService");
 +        if (ncAppEntryPoint != null) {
 +            ncAppEntryPoint.notifyStartupComplete();
 +        }
 +
 +        //add JVM shutdown hook
 +        Runtime.getRuntime().addShutdownHook(new JVMShutdownHook(this));
 +    }
 +
 +    private void startApplication() throws Exception {
 +        appCtx = new NCApplicationContext(this, serverCtx, ctx, id, memoryManager, lccm);
 +        String className = ncConfig.appNCMainClass;
 +        if (className != null) {
 +            Class<?> c = Class.forName(className);
 +            ncAppEntryPoint = (INCApplicationEntryPoint) c.newInstance();
 +            String[] args = ncConfig.appArgs == null ? new String[0]
 +                    : ncConfig.appArgs.toArray(new String[ncConfig.appArgs.size()]);
 +            ncAppEntryPoint.start(appCtx, args);
 +        }
 +        executor = Executors.newCachedThreadPool(appCtx.getThreadFactory());
 +    }
 +
 +    @Override
 +    public synchronized void stop() throws Exception {
 +        if (!shuttedDown) {
 +            LOGGER.log(Level.INFO, "Stopping NodeControllerService");
 +            executor.shutdownNow();
 +            if (!executor.awaitTermination(10, TimeUnit.SECONDS)) {
 +                LOGGER.log(Level.SEVERE, "Some jobs failed to exit, continuing shutdown abnormally");
 +            }
 +            partitionManager.close();
 +            datasetPartitionManager.close();
-             heartbeatTask.cancel();
 +            netManager.stop();
 +            datasetNetworkManager.stop();
 +            queue.stop();
-             if (ncAppEntryPoint != null)
++            if (ncAppEntryPoint != null) {
 +                ncAppEntryPoint.stop();
++            }
++            /**
++             * Stop heartbeat after NC has stopped to avoid false node failure detection
++             * on CC if an NC takes a long time to stop.
++             */
++            heartbeatTask.cancel();
 +            LOGGER.log(Level.INFO, "Stopped NodeControllerService");
 +            shuttedDown = true;
 +        }
 +    }
 +
 +    public String getId() {
 +        return id;
 +    }
 +
 +    public ServerContext getServerContext() {
 +        return serverCtx;
 +    }
 +
 +    public Map<JobId, Joblet> getJobletMap() {
 +        return jobletMap;
 +    }
 +
 +    public NetworkManager getNetworkManager() {
 +        return netManager;
 +    }
 +
 +    public DatasetNetworkManager getDatasetNetworkManager() {
 +        return datasetNetworkManager;
 +    }
 +
 +    public PartitionManager getPartitionManager() {
 +        return partitionManager;
 +    }
 +
 +    public IClusterController getClusterController() {
 +        return ccs;
 +    }
 +
 +    public NodeParameters getNodeParameters() {
 +        return nodeParameters;
 +    }
 +
 +    public ExecutorService getExecutorService() {
 +        return executor;
 +    }
 +
 +    public NCConfig getConfiguration() {
 +        return ncConfig;
 +    }
 +
 +    public WorkQueue getWorkQueue() {
 +        return queue;
 +    }
 +
 +    private class HeartbeatTask extends TimerTask {
 +        private IClusterController cc;
 +
 +        private final HeartbeatData hbData;
 +
 +        public HeartbeatTask(IClusterController cc) {
 +            this.cc = cc;
 +            hbData = new HeartbeatData();
 +            hbData.gcCollectionCounts = new long[gcMXBeans.size()];
 +            hbData.gcCollectionTimes = new long[gcMXBeans.size()];
 +        }
 +
 +        @Override
 +        public void run() {
 +            MemoryUsage heapUsage = memoryMXBean.getHeapMemoryUsage();
 +            hbData.heapInitSize = heapUsage.getInit();
 +            hbData.heapUsedSize = heapUsage.getUsed();
 +            hbData.heapCommittedSize = heapUsage.getCommitted();
 +            hbData.heapMaxSize = heapUsage.getMax();
 +            MemoryUsage nonheapUsage = memoryMXBean.getNonHeapMemoryUsage();
 +            hbData.nonheapInitSize = nonheapUsage.getInit();
 +            hbData.nonheapUsedSize = nonheapUsage.getUsed();
 +            hbData.nonheapCommittedSize = nonheapUsage.getCommitted();
 +            hbData.nonheapMaxSize = nonheapUsage.getMax();
 +            hbData.threadCount = threadMXBean.getThreadCount();
 +            hbData.peakThreadCount = threadMXBean.getPeakThreadCount();
 +            hbData.totalStartedThreadCount = threadMXBean.getTotalStartedThreadCount();
 +            hbData.systemLoadAverage = osMXBean.getSystemLoadAverage();
 +            int gcN = gcMXBeans.size();
 +            for (int i = 0; i < gcN; ++i) {
 +                GarbageCollectorMXBean gcMXBean = gcMXBeans.get(i);
 +                hbData.gcCollectionCounts[i] = gcMXBean.getCollectionCount();
 +                hbData.gcCollectionTimes[i] = gcMXBean.getCollectionTime();
 +            }
 +
 +            MuxDemuxPerformanceCounters netPC = netManager.getPerformanceCounters();
 +            hbData.netPayloadBytesRead = netPC.getPayloadBytesRead();
 +            hbData.netPayloadBytesWritten = netPC.getPayloadBytesWritten();
 +            hbData.netSignalingBytesRead = netPC.getSignalingBytesRead();
 +            hbData.netSignalingBytesWritten = netPC.getSignalingBytesWritten();
 +
 +            MuxDemuxPerformanceCounters datasetNetPC = datasetNetworkManager.getPerformanceCounters();
 +            hbData.datasetNetPayloadBytesRead = datasetNetPC.getPayloadBytesRead();
 +            hbData.datasetNetPayloadBytesWritten = datasetNetPC.getPayloadBytesWritten();
 +            hbData.datasetNetSignalingBytesRead = datasetNetPC.getSignalingBytesRead();
 +            hbData.datasetNetSignalingBytesWritten = datasetNetPC.getSignalingBytesWritten();
 +
 +            IPCPerformanceCounters ipcPC = ipc.getPerformanceCounters();
 +            hbData.ipcMessagesSent = ipcPC.getMessageSentCount();
 +            hbData.ipcMessageBytesSent = ipcPC.getMessageBytesSent();
 +            hbData.ipcMessagesReceived = ipcPC.getMessageReceivedCount();
 +            hbData.ipcMessageBytesReceived = ipcPC.getMessageBytesReceived();
 +
 +            hbData.diskReads = ioCounter.getReads();
 +            hbData.diskWrites = ioCounter.getWrites();
 +
 +            try {
 +                cc.nodeHeartbeat(id, hbData);
 +            } catch (Exception e) {
 +                e.printStackTrace();
 +            }
 +        }
 +    }
 +
 +    private class ProfileDumpTask extends TimerTask {
 +        private IClusterController cc;
 +
 +        public ProfileDumpTask(IClusterController cc) {
 +            this.cc = cc;
 +        }
 +
 +        @Override
 +        public void run() {
 +            try {
 +                FutureValue<List<JobProfile>> fv = new FutureValue<List<JobProfile>>();
 +                BuildJobProfilesWork bjpw = new BuildJobProfilesWork(NodeControllerService.this, fv);
 +                queue.scheduleAndSync(bjpw);
 +                List<JobProfile> profiles = fv.get();
 +                if (!profiles.isEmpty()) {
 +                    cc.reportProfile(id, profiles);
 +                }
 +            } catch (Exception e) {
 +                e.printStackTrace();
 +            }
 +        }
 +    }
 +
 +    private final class NodeControllerIPCI implements IIPCI {
 +        @Override
 +        public void deliverIncomingMessage(final IIPCHandle handle, long mid, long rmid, Object payload,
 +                Exception exception) {
 +            CCNCFunctions.Function fn = (CCNCFunctions.Function) payload;
 +            switch (fn.getFunctionId()) {
 +                case SEND_APPLICATION_MESSAGE: {
 +                    CCNCFunctions.SendApplicationMessageFunction amf = (CCNCFunctions.SendApplicationMessageFunction) fn;
 +                    queue.schedule(new ApplicationMessageWork(NodeControllerService.this, amf.getMessage(),
 +                            amf.getDeploymentId(), amf.getNodeId()));
 +                    return;
 +                }
 +                case START_TASKS: {
 +                    CCNCFunctions.StartTasksFunction stf = (CCNCFunctions.StartTasksFunction) fn;
 +                    queue.schedule(new StartTasksWork(NodeControllerService.this, stf.getDeploymentId(), stf.getJobId(),
 +                            stf.getPlanBytes(), stf.getTaskDescriptors(), stf.getConnectorPolicies(), stf.getFlags()));
 +                    return;
 +                }
 +
 +                case ABORT_TASKS: {
 +                    CCNCFunctions.AbortTasksFunction atf = (CCNCFunctions.AbortTasksFunction) fn;
 +                    queue.schedule(new AbortTasksWork(NodeControllerService.this, atf.getJobId(), atf.getTasks()));
 +                    return;
 +                }
 +
 +                case CLEANUP_JOBLET: {
 +                    CCNCFunctions.CleanupJobletFunction cjf = (CCNCFunctions.CleanupJobletFunction) fn;
 +                    queue.schedule(new CleanupJobletWork(NodeControllerService.this, cjf.getJobId(), cjf.getStatus()));
 +                    return;
 +                }
 +
 +                case REPORT_PARTITION_AVAILABILITY: {
 +                    CCNCFunctions.ReportPartitionAvailabilityFunction rpaf = (CCNCFunctions.ReportPartitionAvailabilityFunction) fn;
 +                    queue.schedule(new ReportPartitionAvailabilityWork(NodeControllerService.this,
 +                            rpaf.getPartitionId(), rpaf.getNetworkAddress()));
 +                    return;
 +                }
 +
 +                case NODE_REGISTRATION_RESULT: {
 +                    CCNCFunctions.NodeRegistrationResult nrrf = (CCNCFunctions.NodeRegistrationResult) fn;
 +                    setNodeRegistrationResult(nrrf.getNodeParameters(), nrrf.getException());
 +                    return;
 +                }
 +
 +                case GET_NODE_CONTROLLERS_INFO_RESPONSE: {
 +                    CCNCFunctions.GetNodeControllersInfoResponseFunction gncirf = (CCNCFunctions.GetNodeControllersInfoResponseFunction) fn;
 +                    setNodeControllersInfo(gncirf.getNodeControllerInfos());
 +                    return;
 +                }
 +
 +                case DEPLOY_BINARY: {
 +                    CCNCFunctions.DeployBinaryFunction ndbf = (CCNCFunctions.DeployBinaryFunction) fn;
 +                    queue.schedule(new DeployBinaryWork(NodeControllerService.this, ndbf.getDeploymentId(),
 +                            ndbf.getBinaryURLs()));
 +                    return;
 +                }
 +
 +                case UNDEPLOY_BINARY: {
 +                    CCNCFunctions.UnDeployBinaryFunction ndbf = (CCNCFunctions.UnDeployBinaryFunction) fn;
 +                    queue.schedule(new UnDeployBinaryWork(NodeControllerService.this, ndbf.getDeploymentId()));
 +                    return;
 +                }
 +
 +                case STATE_DUMP_REQUEST: {
 +                    final CCNCFunctions.StateDumpRequestFunction dsrf = (StateDumpRequestFunction) fn;
 +                    queue.schedule(new StateDumpWork(NodeControllerService.this, dsrf.getStateDumpId()));
 +                    return;
 +                }
 +                case SHUTDOWN_REQUEST: {
 +                    queue.schedule(new ShutdownWork(NodeControllerService.this));
 +                    return;
 +                }
 +            }
 +            throw new IllegalArgumentException("Unknown function: " + fn.getFunctionId());
 +
 +        }
 +    }
 +
 +    public void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId) throws Exception {
 +        ccs.sendApplicationMessageToCC(data, deploymentId, id);
 +    }
 +
 +    public IDatasetPartitionManager getDatasetPartitionManager() {
 +        return datasetPartitionManager;
 +    }
 +
 +    /**
 +     * Shutdown hook that invokes {@link NCApplicationEntryPoint#stop() stop} method.
 +     */
 +    private static class JVMShutdownHook extends Thread {
 +
 +        private final NodeControllerService nodeControllerService;
 +
 +        public JVMShutdownHook(NodeControllerService ncAppEntryPoint) {
 +            this.nodeControllerService = ncAppEntryPoint;
 +        }
 +
 +        @Override
 +        public void run() {
 +            if (LOGGER.isLoggable(Level.INFO)) {
 +                LOGGER.info("Shutdown hook in progress");
 +            }
 +            try {
 +                nodeControllerService.stop();
 +            } catch (Exception e) {
 +                if (LOGGER.isLoggable(Level.WARNING)) {
 +                    LOGGER.warning("Exception in executing shutdown hook" + e);
 +                }
 +            }
 +        }
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
index cf2008c,0000000..c3883e8
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
@@@ -1,88 -1,0 +1,104 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.dataflow.std.file;
 +
 +import java.io.File;
 +import java.io.IOException;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.hyracks.api.comm.IFrameWriter;
 +import org.apache.hyracks.api.context.IHyracksTaskContext;
 +import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
 +import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 +import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.io.IIOManager;
 +import org.apache.hyracks.api.job.IOperatorDescriptorRegistry;
 +import org.apache.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
 +import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 +
 +public class FileRemoveOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
 +
 +    private final IFileSplitProvider fileSplitProvider;
++    private final boolean quietly;
 +
-     public FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder) {
++    public FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder,
++            boolean quietly) {
 +        super(spec, 0, 0);
 +        this.fileSplitProvider = fileSplitProvder;
++        this.quietly = quietly;
++    }
++
++    /**
++     *
++     * @deprecated use {@link #FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder, boolean quietly)} instead.
++     */
++    @Deprecated
++    public FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder) {
++        this(spec, fileSplitProvder, false);
 +    }
 +
 +    private static final long serialVersionUID = 1L;
 +
 +    @Override
 +    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
 +            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
 +        final FileSplit split = fileSplitProvider.getFileSplits()[partition];
 +        final String path = split.getLocalFile().getFile().getPath();
 +        final int deviceId = split.getIODeviceId();
 +        final IIOManager ioManager = ctx.getIOManager();
 +        return new AbstractOperatorNodePushable() {
 +
 +            @Override
 +            public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
 +                throw new IllegalStateException();
 +            }
 +
 +            @Override
 +            public void initialize() throws HyracksDataException {
 +                File f = ioManager.getAbsoluteFileRef(deviceId, path).getFile();
-                 try {
-                     FileUtils.deleteDirectory(f);
-                 } catch (IOException e) {
-                     throw new HyracksDataException(e);
++                if (quietly) {
++                    FileUtils.deleteQuietly(f);
++                } else {
++                    try {
++                        FileUtils.deleteDirectory(f);
++                    } catch (IOException e) {
++                        throw new HyracksDataException(e);
++                    }
 +                }
 +            }
 +
 +            @Override
 +            public IFrameWriter getInputFrameWriter(int index) {
 +                throw new IllegalStateException();
 +            }
 +
 +            @Override
 +            public int getInputArity() {
 +                return 0;
 +            }
 +
 +            @Override
 +            public void deinitialize() throws HyracksDataException {
 +            }
 +        };
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
index d63671e,0000000..29fedef
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
@@@ -1,145 -1,0 +1,146 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.hyracks.storage.am.lsm.btree.impls;
 +
 +import java.util.LinkedList;
 +import java.util.List;
 +
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
- import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
++import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 +
- public class ExternalBTreeOpContext implements ILSMIndexOperationContext {
++public class ExternalBTreeOpContext extends AbstractLSMIndexOperationContext {
 +    public ITreeIndexFrameFactory insertLeafFrameFactory;
 +    public ITreeIndexFrameFactory deleteLeafFrameFactory;
 +    public IBTreeLeafFrame insertLeafFrame;
 +    public IBTreeLeafFrame deleteLeafFrame;
 +    public IndexOperation op;
 +    public final MultiComparator cmp;
 +    public final MultiComparator bloomFilterCmp;
 +    public final ISearchOperationCallback searchCallback;
 +    private final List<ILSMComponent> componentHolder;
 +    private final List<ILSMComponent> componentsToBeMerged;
 +    private final List<ILSMComponent> componentsToBeReplicated;
 +    private final int targetIndexVersion;
 +    public ISearchPredicate searchPredicate;
 +    public LSMBTreeCursorInitialState searchInitialState;
 +
 +    public ExternalBTreeOpContext(ITreeIndexFrameFactory insertLeafFrameFactory,
 +            ITreeIndexFrameFactory deleteLeafFrameFactory, ISearchOperationCallback searchCallback,
 +            int numBloomFilterKeyFields, IBinaryComparatorFactory[] cmpFactories, int targetIndexVersion,
 +            ILSMHarness lsmHarness) {
 +        if (cmpFactories != null) {
 +            this.cmp = MultiComparator.create(cmpFactories);
 +        } else {
 +            this.cmp = null;
 +        }
 +        bloomFilterCmp = MultiComparator.create(cmpFactories, 0, numBloomFilterKeyFields);
 +        this.insertLeafFrameFactory = insertLeafFrameFactory;
 +        this.deleteLeafFrameFactory = deleteLeafFrameFactory;
 +        this.insertLeafFrame = (IBTreeLeafFrame) insertLeafFrameFactory.createFrame();
 +        this.deleteLeafFrame = (IBTreeLeafFrame) deleteLeafFrameFactory.createFrame();
 +        if (insertLeafFrame != null && this.cmp != null) {
 +            insertLeafFrame.setMultiComparator(cmp);
 +        }
 +        if (deleteLeafFrame != null && this.cmp != null) {
 +            deleteLeafFrame.setMultiComparator(cmp);
 +        }
 +        this.componentHolder = new LinkedList<ILSMComponent>();
 +        this.componentsToBeMerged = new LinkedList<ILSMComponent>();
 +        this.componentsToBeReplicated = new LinkedList<ILSMComponent>();
 +        this.searchCallback = searchCallback;
 +        this.targetIndexVersion = targetIndexVersion;
 +        searchInitialState = new LSMBTreeCursorInitialState(insertLeafFrameFactory, cmp, bloomFilterCmp, lsmHarness,
 +                null, searchCallback, null);
 +    }
 +
 +    @Override
 +    public void setOperation(IndexOperation newOp) {
 +        reset();
 +        this.op = newOp;
 +    }
 +
 +    @Override
 +    public void reset() {
++        super.reset();
 +        componentHolder.clear();
 +        componentsToBeMerged.clear();
 +        componentsToBeReplicated.clear();
 +    }
 +
 +    @Override
 +    public IndexOperation getOperation() {
 +        return op;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentHolder() {
 +        return componentHolder;
 +    }
 +
 +    @Override
 +    public ISearchOperationCallback getSearchOperationCallback() {
 +        return searchCallback;
 +    }
 +
 +    // Disk only index should never needs a modification callback
 +    @Override
 +    public IModificationOperationCallback getModificationCallback() {
 +        return null;
 +    }
 +
 +    @Override
 +    public void setCurrentMutableComponentId(int currentMutableComponentId) {
 +        // Do nothing: this method should never be called for this class
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeMerged() {
 +        return componentsToBeMerged;
 +    }
 +
 +    // Used by indexes with global transaction
 +    public int getTargetIndexVersion() {
 +        return targetIndexVersion;
 +    }
 +
 +    @Override
 +    public void setSearchPredicate(ISearchPredicate searchPredicate) {
 +        this.searchPredicate = searchPredicate;
 +    }
 +
 +    @Override
 +    public ISearchPredicate getSearchPredicate() {
 +        return searchPredicate;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeReplicated() {
 +        return componentsToBeReplicated;
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
index a837301,0000000..c44f529
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
@@@ -1,134 -1,0 +1,135 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.hyracks.storage.am.lsm.btree.impls;
 +
 +import java.util.LinkedList;
 +import java.util.List;
 +
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
- import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
++import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 +
- public class ExternalBTreeWithBuddyOpContext implements ILSMIndexOperationContext {
++public class ExternalBTreeWithBuddyOpContext extends AbstractLSMIndexOperationContext {
 +    private IndexOperation op;
 +    private MultiComparator bTreeCmp;
 +    private MultiComparator buddyBTreeCmp;
 +    public final List<ILSMComponent> componentHolder;
 +    private final List<ILSMComponent> componentsToBeMerged;
 +    private final List<ILSMComponent> componentsToBeReplicated;
 +    public final ISearchOperationCallback searchCallback;
 +    private final int targetIndexVersion;
 +    public ISearchPredicate searchPredicate;
 +    public LSMBTreeWithBuddyCursorInitialState searchInitialState;
 +
 +    public ExternalBTreeWithBuddyOpContext(IBinaryComparatorFactory[] btreeCmpFactories,
 +            IBinaryComparatorFactory[] buddyBtreeCmpFactories, ISearchOperationCallback searchCallback,
 +            int targetIndexVersion, ILSMHarness lsmHarness, ITreeIndexFrameFactory btreeInteriorFrameFactory,
 +            ITreeIndexFrameFactory btreeLeafFrameFactory, ITreeIndexFrameFactory buddyBtreeLeafFrameFactory) {
 +        this.componentHolder = new LinkedList<ILSMComponent>();
 +        this.componentsToBeMerged = new LinkedList<ILSMComponent>();
 +        this.componentsToBeReplicated = new LinkedList<ILSMComponent>();
 +        this.searchCallback = searchCallback;
 +        this.targetIndexVersion = targetIndexVersion;
 +        this.bTreeCmp = MultiComparator.create(btreeCmpFactories);
 +        this.buddyBTreeCmp = MultiComparator.create(buddyBtreeCmpFactories);
 +        searchInitialState = new LSMBTreeWithBuddyCursorInitialState(btreeInteriorFrameFactory, btreeLeafFrameFactory,
 +                buddyBtreeLeafFrameFactory, lsmHarness, MultiComparator.create(btreeCmpFactories),
 +                MultiComparator.create(buddyBtreeCmpFactories), NoOpOperationCallback.INSTANCE, null);
 +    }
 +
 +    @Override
 +    public void setOperation(IndexOperation newOp) {
 +        reset();
 +        this.op = newOp;
 +    }
 +
 +    @Override
 +    public void setCurrentMutableComponentId(int currentMutableComponentId) {
 +        // Do nothing. this should never be called for disk only indexes
 +    }
 +
 +    @Override
 +    public void reset() {
++        super.reset();
 +        componentHolder.clear();
 +        componentsToBeMerged.clear();
 +        componentsToBeReplicated.clear();
 +    }
 +
 +    @Override
 +    public IndexOperation getOperation() {
 +        return op;
 +    }
 +
 +    public MultiComparator getBTreeMultiComparator() {
 +        return bTreeCmp;
 +    }
 +
 +    public MultiComparator getBuddyBTreeMultiComparator() {
 +        return buddyBTreeCmp;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentHolder() {
 +        return componentHolder;
 +    }
 +
 +    @Override
 +    public ISearchOperationCallback getSearchOperationCallback() {
 +        return searchCallback;
 +    }
 +
 +    // This should never be needed for disk only indexes
 +    @Override
 +    public IModificationOperationCallback getModificationCallback() {
 +        return null;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeMerged() {
 +        return componentsToBeMerged;
 +    }
 +
 +    public int getTargetIndexVersion() {
 +        return targetIndexVersion;
 +    }
 +
 +    @Override
 +    public void setSearchPredicate(ISearchPredicate searchPredicate) {
 +        this.searchPredicate = searchPredicate;
 +    }
 +
 +    @Override
 +    public ISearchPredicate getSearchPredicate() {
 +        return searchPredicate;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeReplicated() {
 +        return componentsToBeReplicated;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
index e833283,0000000..31c9d40
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
@@@ -1,218 -1,0 +1,219 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.storage.am.lsm.btree.impls;
 +
 +import java.util.LinkedList;
 +import java.util.List;
 +
 +import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 +import org.apache.hyracks.storage.am.btree.api.IBTreeLeafFrame;
 +import org.apache.hyracks.storage.am.btree.impls.BTree;
 +import org.apache.hyracks.storage.am.btree.impls.BTreeOpContext;
 +import org.apache.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
 +import org.apache.hyracks.storage.am.btree.impls.RangePredicate;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +import org.apache.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 +import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallback;
 +import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 +import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 +import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
- import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
++import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 +
- public final class LSMBTreeOpContext implements ILSMIndexOperationContext {
++public final class LSMBTreeOpContext extends AbstractLSMIndexOperationContext {
 +
 +    public ITreeIndexFrameFactory insertLeafFrameFactory;
 +    public ITreeIndexFrameFactory deleteLeafFrameFactory;
 +    public IBTreeLeafFrame insertLeafFrame;
 +    public IBTreeLeafFrame deleteLeafFrame;
 +    public final BTree[] mutableBTrees;
 +    public BTree.BTreeAccessor[] mutableBTreeAccessors;
 +    public BTreeOpContext[] mutableBTreeOpCtxs;
 +    public BTree.BTreeAccessor currentMutableBTreeAccessor;
 +    public BTreeOpContext currentMutableBTreeOpCtx;
 +    public IndexOperation op;
 +    public final MultiComparator cmp;
 +    public final MultiComparator bloomFilterCmp;
 +    public IModificationOperationCallback modificationCallback;
 +    public ISearchOperationCallback searchCallback;
 +    private final List<ILSMComponent> componentHolder;
 +    private final List<ILSMComponent> componentsToBeMerged;
 +    private final List<ILSMComponent> componentsToBeReplicated;
 +    public final PermutingTupleReference indexTuple;
 +    public final MultiComparator filterCmp;
 +    public final PermutingTupleReference filterTuple;
 +    public ISearchPredicate searchPredicate;
 +    public BTreeRangeSearchCursor memCursor;
 +    public LSMBTreeCursorInitialState searchInitialState;
 +    public LSMBTreePointSearchCursor insertSearchCursor;
 +
 +    public LSMBTreeOpContext(List<ILSMComponent> mutableComponents, ITreeIndexFrameFactory insertLeafFrameFactory,
 +            ITreeIndexFrameFactory deleteLeafFrameFactory, IModificationOperationCallback modificationCallback,
 +            ISearchOperationCallback searchCallback, int numBloomFilterKeyFields, int[] btreeFields, int[] filterFields,
 +            ILSMHarness lsmHarness) {
 +        LSMBTreeMemoryComponent c = (LSMBTreeMemoryComponent) mutableComponents.get(0);
 +        IBinaryComparatorFactory cmpFactories[] = c.getBTree().getComparatorFactories();
 +        if (cmpFactories[0] != null) {
 +            this.cmp = MultiComparator.create(c.getBTree().getComparatorFactories());
 +        } else {
 +            this.cmp = null;
 +        }
 +
 +        bloomFilterCmp = MultiComparator.create(c.getBTree().getComparatorFactories(), 0, numBloomFilterKeyFields);
 +
 +        mutableBTrees = new BTree[mutableComponents.size()];
 +        mutableBTreeAccessors = new BTree.BTreeAccessor[mutableComponents.size()];
 +        mutableBTreeOpCtxs = new BTreeOpContext[mutableComponents.size()];
 +        for (int i = 0; i < mutableComponents.size(); i++) {
 +            LSMBTreeMemoryComponent mutableComponent = (LSMBTreeMemoryComponent) mutableComponents.get(i);
 +            mutableBTrees[i] = mutableComponent.getBTree();
 +            mutableBTreeAccessors[i] = (BTree.BTreeAccessor) mutableBTrees[i].createAccessor(modificationCallback,
 +                    NoOpOperationCallback.INSTANCE);
 +            mutableBTreeOpCtxs[i] = mutableBTreeAccessors[i].getOpContext();
 +        }
 +
 +        this.insertLeafFrameFactory = insertLeafFrameFactory;
 +        this.deleteLeafFrameFactory = deleteLeafFrameFactory;
 +        this.insertLeafFrame = (IBTreeLeafFrame) insertLeafFrameFactory.createFrame();
 +        this.deleteLeafFrame = (IBTreeLeafFrame) deleteLeafFrameFactory.createFrame();
 +        if (insertLeafFrame != null && this.cmp != null) {
 +            insertLeafFrame.setMultiComparator(cmp);
 +        }
 +        if (deleteLeafFrame != null && this.cmp != null) {
 +            deleteLeafFrame.setMultiComparator(cmp);
 +        }
 +        this.componentHolder = new LinkedList<ILSMComponent>();
 +        this.componentsToBeMerged = new LinkedList<ILSMComponent>();
 +        this.componentsToBeReplicated = new LinkedList<ILSMComponent>();
 +        this.modificationCallback = modificationCallback;
 +        this.searchCallback = searchCallback;
 +
 +        if (filterFields != null) {
 +            indexTuple = new PermutingTupleReference(btreeFields);
 +            filterCmp = MultiComparator.create(c.getLSMComponentFilter().getFilterCmpFactories());
 +            filterTuple = new PermutingTupleReference(filterFields);
 +        } else {
 +            indexTuple = null;
 +            filterCmp = null;
 +            filterTuple = null;
 +        }
 +        searchPredicate = new RangePredicate(null, null, true, true, cmp, cmp);
 +        if (insertLeafFrame != null) {
 +            memCursor = new BTreeRangeSearchCursor(insertLeafFrame, false);
 +        }
 +
 +        searchInitialState = new LSMBTreeCursorInitialState(insertLeafFrameFactory, cmp, bloomFilterCmp, lsmHarness,
 +                null, searchCallback, null);
 +        insertSearchCursor = new LSMBTreePointSearchCursor(this);
 +    }
 +
 +    @Override
 +    public void setOperation(IndexOperation newOp) {
 +        reset();
 +        this.op = newOp;
 +    }
 +
 +    public void setInsertMode() {
 +        currentMutableBTreeOpCtx.leafFrame = insertLeafFrame;
 +        currentMutableBTreeOpCtx.leafFrameFactory = insertLeafFrameFactory;
 +    }
 +
 +    public void setDeleteMode() {
 +        currentMutableBTreeOpCtx.leafFrame = deleteLeafFrame;
 +        currentMutableBTreeOpCtx.leafFrameFactory = deleteLeafFrameFactory;
 +    }
 +
 +    @Override
 +    public void reset() {
++        super.reset();
 +        componentHolder.clear();
 +        componentsToBeMerged.clear();
 +        componentsToBeReplicated.clear();
 +    }
 +
 +    @Override
 +    public IndexOperation getOperation() {
 +        return op;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentHolder() {
 +        return componentHolder;
 +    }
 +
 +    @Override
 +    public ISearchOperationCallback getSearchOperationCallback() {
 +        return searchCallback;
 +    }
 +
 +    @Override
 +    public IModificationOperationCallback getModificationCallback() {
 +        return modificationCallback;
 +    }
 +
 +    @Override
 +    public void setCurrentMutableComponentId(int currentMutableComponentId) {
 +        currentMutableBTreeAccessor = mutableBTreeAccessors[currentMutableComponentId];
 +        currentMutableBTreeOpCtx = mutableBTreeOpCtxs[currentMutableComponentId];
 +        switch (op) {
 +            case SEARCH:
 +                break;
 +            case DISKORDERSCAN:
 +            case UPDATE:
 +                // Attention: It is important to leave the leafFrame and
 +                // leafFrameFactory of the mutableBTree as is when doing an update.
 +                // Update will only be set if a previous attempt to delete or
 +                // insert failed, so we must preserve the semantics of the
 +                // previously requested operation.
 +                break;
 +            case UPSERT:
 +            case INSERT:
 +                setInsertMode();
 +                break;
 +            case PHYSICALDELETE:
 +            case DELETE:
 +                setDeleteMode();
 +                break;
 +        }
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeMerged() {
 +        return componentsToBeMerged;
 +    }
 +
 +    @Override
 +    public void setSearchPredicate(ISearchPredicate searchPredicate) {
 +        this.searchPredicate = searchPredicate;
 +    }
 +
 +    @Override
 +    public ISearchPredicate getSearchPredicate() {
 +        return searchPredicate;
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getComponentsToBeReplicated() {
 +        return componentsToBeReplicated;
 +    }
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
index c1cef2d,0000000..11b933d
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndex.java
@@@ -1,54 -1,0 +1,60 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.storage.am.lsm.common.api;
 +
 +import java.util.List;
 +
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.storage.am.common.api.IIndex;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.lsm.common.impls.LSMHarness;
 +
 +/**
 + * Methods to be implemented by an LSM index, which are called from {@link LSMHarness}.
 + * The implementations of the methods below should be thread agnostic.
 + * Synchronization of LSM operations like updates/searches/flushes/merges are
 + * done by the {@link LSMHarness}. For example, a flush() implementation should only
 + * create and return the new on-disk component, ignoring the fact that
 + * concurrent searches/updates/merges may be ongoing.
 + */
 +public interface ILSMIndex extends IIndex {
 +
 +    public void deactivate(boolean flushOnExit) throws HyracksDataException;
 +
++    @Override
 +    public ILSMIndexAccessor createAccessor(IModificationOperationCallback modificationCallback,
 +            ISearchOperationCallback searchCallback) throws HyracksDataException;
 +
 +    public ILSMOperationTracker getOperationTracker();
 +
 +    public ILSMIOOperationScheduler getIOScheduler();
 +
 +    public ILSMIOOperationCallback getIOOperationCallback();
 +
 +    public List<ILSMComponent> getImmutableComponents();
 +
 +    public boolean isPrimaryIndex();
++
++    /**
++     * @return true if the index is durable. Otherwise false.
++     */
++    public boolean isDurable();
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
index 99f981d,0000000..acf2233
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
@@@ -1,44 -1,0 +1,51 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.hyracks.storage.am.lsm.common.api;
 +
 +import java.util.List;
 +
 +import org.apache.hyracks.storage.am.common.api.IIndexOperationContext;
 +import org.apache.hyracks.storage.am.common.api.IModificationOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchOperationCallback;
 +import org.apache.hyracks.storage.am.common.api.ISearchPredicate;
 +
 +public interface ILSMIndexOperationContext extends IIndexOperationContext {
 +    public List<ILSMComponent> getComponentHolder();
 +
 +    public List<ILSMComponent> getComponentsToBeMerged();
 +
 +    public ISearchOperationCallback getSearchOperationCallback();
 +
 +    public IModificationOperationCallback getModificationCallback();
 +
 +    public void setCurrentMutableComponentId(int currentMutableComponentId);
 +
 +    public void setSearchPredicate(ISearchPredicate searchPredicate);
 +
 +    public ISearchPredicate getSearchPredicate();
 +
 +    public List<ILSMComponent> getComponentsToBeReplicated();
++
++    /**
++     * @return true if this operation entered the components. Otherwise false.
++     */
++    public boolean isAccessingComponents();
++
++    public void setAccessingComponents(boolean accessingComponents);
 +}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/e928b6ac/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
----------------------------------------------------------------------
diff --cc hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
index 441dda1,0000000..440ad31
mode 100644,000000..100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
@@@ -1,296 -1,0 +1,299 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *   http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.apache.hyracks.storage.am.lsm.common.impls;
 +
 +import java.io.IOException;
 +import java.util.ArrayList;
 +import java.util.HashSet;
 +import java.util.LinkedList;
 +import java.util.List;
 +import java.util.Set;
 +import java.util.concurrent.atomic.AtomicBoolean;
 +import java.util.concurrent.atomic.AtomicInteger;
 +
 +import org.apache.hyracks.api.exceptions.HyracksDataException;
 +import org.apache.hyracks.api.replication.IReplicationJob.ReplicationExecutionType;
 +import org.apache.hyracks.api.replication.IReplicationJob.ReplicationOperation;
 +import org.apache.hyracks.storage.am.bloomfilter.impls.BloomFilter;
 +import org.apache.hyracks.storage.am.common.api.ITreeIndex;
- import org.apache.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent.ComponentState;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponentFilterFrameFactory;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationCallback;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIOOperationScheduler;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexFileManager;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexInternal;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicy;
 +import org.apache.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
 +import org.apache.hyracks.storage.am.lsm.common.api.IVirtualBufferCache;
 +import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
 +import org.apache.hyracks.storage.common.buffercache.IBufferCache;
- import org.apache.hyracks.storage.common.buffercache.ICachedPage;
- import org.apache.hyracks.storage.common.file.BufferedFileHandle;
 +import org.apache.hyracks.storage.common.file.IFileMapProvider;
 +
 +public abstract class AbstractLSMIndex implements ILSMIndexInternal {
 +    protected final ILSMHarness lsmHarness;
 +
 +    protected final ILSMIOOperationScheduler ioScheduler;
 +    protected final ILSMIOOperationCallback ioOpCallback;
 +
 +    // In-memory components.
 +    protected final List<ILSMComponent> memoryComponents;
 +    protected final List<IVirtualBufferCache> virtualBufferCaches;
 +    protected AtomicInteger currentMutableComponentId;
 +
 +    // On-disk components.
 +    protected final IBufferCache diskBufferCache;
 +    protected final ILSMIndexFileManager fileManager;
 +    protected final IFileMapProvider diskFileMapProvider;
 +    protected final List<ILSMComponent> diskComponents;
 +    protected final List<ILSMComponent> inactiveDiskComponents;
 +    protected final double bloomFilterFalsePositiveRate;
 +    protected final ILSMComponentFilterFrameFactory filterFrameFactory;
 +    protected final LSMComponentFilterManager filterManager;
 +    protected final int[] filterFields;
 +    protected final boolean durable;
 +
 +    protected boolean isActivated;
 +    protected final AtomicBoolean[] flushRequests;
 +    protected boolean memoryComponentsAllocated = false;
 +
 +    public AbstractLSMIndex(List<IVirtualBufferCache> virtualBufferCaches, IBufferCache diskBufferCache,
-             ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider,
-             double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
-             ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
-             ILSMComponentFilterFrameFactory filterFrameFactory, LSMComponentFilterManager filterManager,
-             int[] filterFields, boolean durable) {
++            ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider, double bloomFilterFalsePositiveRate,
++            ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler,
++            ILSMIOOperationCallback ioOpCallback, ILSMComponentFilterFrameFactory filterFrameFactory,
++            LSMComponentFilterManager filterManager, int[] filterFields, boolean durable) {
 +        this.virtualBufferCaches = virtualBufferCaches;
 +        this.diskBufferCache = diskBufferCache;
 +        this.diskFileMapProvider = diskFileMapProvider;
 +        this.fileManager = fileManager;
 +        this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
 +        this.ioScheduler = ioScheduler;
 +        this.ioOpCallback = ioOpCallback;
 +        this.ioOpCallback.setNumOfMutableComponents(virtualBufferCaches.size());
 +        this.filterFrameFactory = filterFrameFactory;
 +        this.filterManager = filterManager;
 +        this.filterFields = filterFields;
 +        this.inactiveDiskComponents = new LinkedList<ILSMComponent>();
 +        this.durable = durable;
 +        lsmHarness = new LSMHarness(this, mergePolicy, opTracker, diskBufferCache.isReplicationEnabled());
 +        isActivated = false;
 +        diskComponents = new ArrayList<ILSMComponent>();
 +        memoryComponents = new ArrayList<ILSMComponent>();
 +        currentMutableComponentId = new AtomicInteger();
 +        flushRequests = new AtomicBoolean[virtualBufferCaches.size()];
 +        for (int i = 0; i < virtualBufferCaches.size(); i++) {
 +            flushRequests[i] = new AtomicBoolean();
 +        }
 +    }
 +
 +    // The constructor used by external indexes
 +    public AbstractLSMIndex(IBufferCache diskBufferCache, ILSMIndexFileManager fileManager,
 +            IFileMapProvider diskFileMapProvider, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
 +            ILSMOperationTracker opTracker, ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallback ioOpCallback,
 +            boolean durable) {
 +        this.diskBufferCache = diskBufferCache;
 +        this.diskFileMapProvider = diskFileMapProvider;
 +        this.fileManager = fileManager;
 +        this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
 +        this.ioScheduler = ioScheduler;
 +        this.ioOpCallback = ioOpCallback;
 +        this.durable = durable;
 +        lsmHarness = new ExternalIndexHarness(this, mergePolicy, opTracker, diskBufferCache.isReplicationEnabled());
 +        isActivated = false;
 +        diskComponents = new LinkedList<ILSMComponent>();
 +        this.inactiveDiskComponents = new LinkedList<ILSMComponent>();
 +        // Memory related objects are nulled
 +        this.virtualBufferCaches = null;
 +        memoryComponents = null;
 +        currentMutableComponentId = null;
 +        flushRequests = null;
 +        filterFrameFactory = null;
 +        filterManager = null;
 +        filterFields = null;
 +    }
 +
 +    protected void markAsValidInternal(ITreeIndex treeIndex) throws HyracksDataException {
 +        int fileId = treeIndex.getFileId();
 +        IBufferCache bufferCache = treeIndex.getBufferCache();
 +        treeIndex.getMetaManager().close();
 +        // WARNING: flushing the metadata page should be done after releasing the write latch; otherwise, the page
 +        // won't be flushed to disk because it won't be dirty until the write latch has been released.
 +        // Force modified metadata page to disk.
 +        // If the index is not durable, then the flush is not necessary.
 +        if (durable) {
 +            bufferCache.force(fileId, true);
 +        }
 +    }
 +
 +    protected void markAsValidInternal(IBufferCache bufferCache, BloomFilter filter) throws HyracksDataException {
-         if(durable){
-             bufferCache.force(filter.getFileId(),true);
++        if (durable) {
++            bufferCache.force(filter.getFileId(), true);
 +        }
 +    }
 +
 +    @Override
 +    public void addComponent(ILSMComponent c) throws HyracksDataException {
 +        diskComponents.add(0, c);
 +    }
 +
 +    @Override
 +    public void subsumeMergedComponents(ILSMComponent newComponent, List<ILSMComponent> mergedComponents)
 +            throws HyracksDataException {
 +        int swapIndex = diskComponents.indexOf(mergedComponents.get(0));
 +        diskComponents.removeAll(mergedComponents);
 +        diskComponents.add(swapIndex, newComponent);
 +    }
 +
 +    @Override
 +    public void changeMutableComponent() {
 +        currentMutableComponentId.set((currentMutableComponentId.get() + 1) % memoryComponents.size());
 +        ((AbstractMemoryLSMComponent) memoryComponents.get(currentMutableComponentId.get())).setActive();
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getImmutableComponents() {
 +        return diskComponents;
 +    }
 +
 +    @Override
 +    public void changeFlushStatusForCurrentMutableCompoent(boolean needsFlush) {
 +        flushRequests[currentMutableComponentId.get()].set(needsFlush);
 +    }
 +
 +    @Override
 +    public boolean hasFlushRequestForCurrentMutableComponent() {
 +        return flushRequests[currentMutableComponentId.get()].get();
 +    }
 +
 +    @Override
 +    public ILSMOperationTracker getOperationTracker() {
 +        return lsmHarness.getOperationTracker();
 +    }
 +
 +    @Override
 +    public ILSMIOOperationScheduler getIOScheduler() {
 +        return ioScheduler;
 +    }
 +
 +    @Override
 +    public ILSMIOOperationCallback getIOOperationCallback() {
 +        return ioOpCallback;
 +    }
 +
 +    @Override
 +    public IBufferCache getBufferCache() {
 +        return diskBufferCache;
 +    }
 +
-     public boolean isEmptyIndex() throws HyracksDataException {
++    public boolean isEmptyIndex() {
 +        boolean isModified = false;
 +        for (ILSMComponent c : memoryComponents) {
 +            AbstractMemoryLSMComponent mutableComponent = (AbstractMemoryLSMComponent) c;
 +            if (mutableComponent.isModified()) {
 +                isModified = true;
 +                break;
 +            }
 +        }
 +        return diskComponents.isEmpty() && !isModified;
 +    }
 +
 +    @Override
 +    public String toString() {
 +        return "LSMIndex [" + fileManager.getBaseDir() + "]";
 +    }
 +
 +    @Override
 +    public boolean hasMemoryComponents() {
 +        return true;
 +    }
 +
 +    @Override
 +    public boolean isCurrentMutableComponentEmpty() throws HyracksDataException {
 +        //check if the current memory component has been modified
 +        return !((AbstractMemoryLSMComponent) memoryComponents.get(currentMutableComponentId.get())).isModified();
 +    }
 +
 +    public void setCurrentMutableComponentState(ComponentState componentState) {
 +        ((AbstractMemoryLSMComponent) memoryComponents.get(currentMutableComponentId.get())).setState(componentState);
 +    }
 +
 +    public ComponentState getCurrentMutableComponentState() {
 +        return ((AbstractMemoryLSMComponent) memoryComponents.get(currentMutableComponentId.get())).getState();
 +    }
 +
 +    public int getCurrentMutableComponentWriterCount() {
 +        return ((AbstractMemoryLSMComponent) memoryComponents.get(currentMutableComponentId.get())).getWriterCount();
 +    }
 +
 +    @Override
 +    public List<ILSMComponent> getInactiveDiskComponents() {
 +        return inactiveDiskComponents;
 +    }
 +
 +    @Override
 +    public void addInactiveDiskComponent(ILSMComponent diskComponent) {
 +        inactiveDiskComponents.add(diskComponent);
 +    }
 +
 +    public abstract Set<String> getLSMComponentPhysicalFiles(ILSMComponent newComponent);
 +
 +    @Override
 +    public void scheduleReplication(ILSMIndexOperationContext ctx, List<ILSMComponent> lsmComponents, boolean bulkload,
 +            ReplicationOperation operation, LSMOperationType opType) throws HyracksDataException {
 +        //get set of files to be replicated for this component
 +        Set<String> componentFiles = new HashSet<String>();
 +
 +        //get set of files to be replicated for each component
 +        for (ILSMComponent lsmComponent : lsmComponents) {
 +            componentFiles.addAll(getLSMComponentPhysicalFiles(lsmComponent));
 +        }
 +
 +        ReplicationExecutionType executionType;
 +        if (bulkload) {
 +            executionType = ReplicationExecutionType.SYNC;
 +        } else {
 +            executionType = ReplicationExecutionType.ASYNC;
 +        }
 +
 +        //create replication job and submit it
 +        LSMIndexReplicationJob job = new LSMIndexReplicationJob(this, ctx, componentFiles, operation, executionType,
 +                opType);
 +        try {
 +            diskBufferCache.getIOReplicationManager().submitJob(job);
 +        } catch (IOException e) {
 +            throw new HyracksDataException(e);
 +        }
 +    }
 +
++    @Override
 +    public abstract void allocateMemoryComponents() throws HyracksDataException;
 +
++    @Override
 +    public boolean isMemoryComponentsAllocated() {
 +        return memoryComponentsAllocated;
 +    }
++
++    @Override
++    public boolean isDurable() {
++        return durable;
++    }
 +}


[45/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm
deleted file mode 100644
index f54dee9..0000000
--- a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-new/classad-parser-new.1.adm
+++ /dev/null
@@ -1,100 +0,0 @@
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID
 : 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatchTime": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "Request
 Memory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceNa
 me)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-es
 r1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "ssericksen@chtc.wisc.edu", "StreamOut
 ": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkp
 ts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e27
 2.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, 
 "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts
 _RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "Nu
 mJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e286.chtc.wisc
 .edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLea
 seDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpt
 s_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "
 NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e457.chtc.wi
 sc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLe
 aseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "M
 inHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_Job
 Starts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Total
 TimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMon
 itorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_To
 talTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Total
 TimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites":
  0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "DAGManNodesM
 ask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSusp
 ensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Environment": "", "LastPublicClaim
 Id": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e44
 5.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250
 000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e37
 2.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250
 000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", 
 "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_J
 obStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tot
 alTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfM
 onitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_
 TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tot
 alTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites
 ": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.log", "DAGMa
 nNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "Comm
 ittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -- 3", "Environment": "", "LastP
 ublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_
 RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 2
 6620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@c0
 64.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1
 250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes"
 : 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e42
 3.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250
 000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes":
  0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e320
 .chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 125000
 0, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0,
  "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e442.ch
 tc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000,
  "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582124.0#1446105525", "StatsLifetimeStarter": 24745, "JobStartDate": 1446107685, "SubmitEventNotes": "DAG Node: 323+323", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 13, "StartdPrincipal": "execute-side@matchsession/128.104.55.89", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107685, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 75000, "RemoteWallClockTime": 24748.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132433, "ResidentSetSize_RAW": 71248, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "Min
 Hosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 21145.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 118000, "BytesSent": 30560.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobSt
 arts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTi
 meUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonit
 orAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_Tota
 lTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTi
 meClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0
 , "JobFinishedHookDone": 1446132434, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 314, "SpooledOutputFiles": "harvest.log,CURLTIME_3853266,ChtcWrapper323.out,AuditLog.323,simu_3_323.txt,323.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107686, "ExitBySignal": false, "LastMatchTime": 1446107685, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 1142, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 43788, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24748, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/323/process.log", "D
 AGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24748.0d, "LastJobLeaseRenewal": 1446132433, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "323+323", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@c070.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 175.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/323/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, 
 "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132433, "StreamErr": false, "RecentBlockReadKbytes": 4224, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582124, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24748.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=323 -- 3", "Environment": ""
 , "LastPublicClaimId": "<128.104.55.89:32652>#1445371750#1302#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/323", "QDate": 1446105525, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", 
 "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_J
 obStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_Tot
 alTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfM
 onitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_
 TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_Tot
 alTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites
 ": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.log", "DAGMa
 nNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 )", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "Comm
 ittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -- 3", "Environment": "", "LastP
 ublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
-{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCk
 pts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133562, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_523030,ChtcWrapper333.out,AuditLog.333,simu_3_333.txt,harvest.log,333.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes":
  0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25876, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/333/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25876.0d, "LastJobLeaseRenewal": 1446133562, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "333+333", "PeriodicRelease": "( Jo

<TRUNCATED>


[43/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-external-data/src/test/resources/classad-with-temporals.classads
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/classad-with-temporals.classads b/asterix-external-data/src/test/resources/classad-with-temporals.classads
deleted file mode 100644
index e20be09..0000000
--- a/asterix-external-data/src/test/resources/classad-with-temporals.classads
+++ /dev/null
@@ -1,134 +0,0 @@
-
-    [
-        Schedd = "submit-5.chtc.wisc.edu";
-        BlockWrites = 3;
-        LastJobStatus = 2;
-        JobCurrentStartExecutingDate = 1459300924;
-        WantRemoteIO = true;
-        RequestCpus = 1;
-        NumShadowStarts = 1;
-        RemoteUserCpu = 6.607100000000000E+04;
-        NiceUser = false;
-        BytesRecvd = 7.292000000000000E+03;
-        RequestMemory = 12288;
-        ResidentSetSize = 750000;
-        StreamOut = false;
-        SpooledOutputFiles = "job697_results.tar.gz";
-        Arguments = "";
-        OnExitRemove = true;
-        ImageSize_RAW = 607024;
-        RemoteWallClockTime = 6.629100000000000E+04;
-        MachineAttrSlotWeight0 = 1;
-        ExecutableSize = 4;
-        JobStatus = 4;
-        DAGParentNodeNames = "";
-        ExitCode = 0;
-        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
-        BytesSent = 8.580547200000000E+07;
-        LastRejMatchReason = "no match found ";
-        LastSuspensionTime = 0;
-        ExecutableSize_RAW = 4;
-        RecentBlockReadKbytes = 0;
-        TransferInputSizeMB = 0;
-        BlockReadKbytes = 0;
-        LocalSysCpu = 0.0;
-        Iwd = "/home/grandaduarte/mars/mhb1";
-        Cmd = "/home/grandaduarte/mars/mhb1/job697.sh";
-        CommittedSuspensionTime = 0;
-        RecentStatsLifetimeStarter = 1200;
-        TargetType = "Machine";
-        WhenToTransferOutput = "ON_EXIT";
-        BufferSize = 524288;
-        JobCurrentStartTransferOutputDate = 1459367212;
-        RecentBlockWrites = 0;
-        CompletionDate = 1459367213;
-        LastMatchTime = 1459300922;
-        LastJobLeaseRenewal = 1459367213;
-        DAGManNodesLog = "/home/grandaduarte/mars/mhb1/./dagman.dag.nodes.log";
-        ClusterId = 16798777;
-        JobUniverse = 5;
-        NumJobStarts = 1;
-        ProcId = 0;
-        PeriodicHold = false;
-        CondorPlatform = "$CondorPlatform: x86_64_RedHat6 $";
-        JobFinishedHookDone = 1459367213;
-        In = "/dev/null";
-        DiskUsage = 7500000;
-        EncryptExecuteDirectory = false;
-        User = "grandaduarte@chtc.wisc.edu";
-        LeaveJobInQueue = false;
-        Requirements = ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || ( ( MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" ) && ( TARGET.OpSysMajorVer == MY.LinuxVer || TARGET.OpSysMajorVer == MY.LinuxVerAlt || TARGET.OpSysMajorVer == MY.WinVer ) ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
-        MinHosts = 1;
-        MaxHosts = 1;
-        StartdPrincipal = "execute-side@matchsession/128.105.245.175";
-        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
-        EnteredCurrentStatus = 1459367213;
-        JobLeaseDuration = 2400;
-        QDate = 1459298672;
-        AccountingGroup = EngrPhysics_Wilson;
-        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
-        TerminationPending = true;
-        NumRestarts = 0;
-        NumSystemHolds = 0;
-        Environment = "";
-        LinuxVer = 6;
-        DAGNodeName = "_mars_MH1B1_661.inp";
-        CoreSize = 0;
-        OnExitHold = false;
-        CondorVersion = "$CondorVersion: 8.5.3 Mar 14 2016 BuildID: 358989 $";
-        UserLog = "/home/grandaduarte/mars/mhb1/job697.log";
-        JobCurrentStartDate = 1459300922;
-        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
-        BufferBlockSize = 32768;
-        BlockWriteKbytes = 24;
-        ExitBySignal = false;
-        DAGManJobId = 16795779;
-        MachineAttrCpus0 = 1;
-        WantRemoteSyscalls = false;
-        CumulativeSuspensionTime = 0;
-        MyType = "Job";
-        Rank = 0.0;
-        JobNotification = 0;
-        Owner = "grandaduarte";
-        LinuxVerAlt = 6;
-        Err = "job697.err";
-        PeriodicRemove = false;
-        CommittedTime = 66291;
-        RecentBlockWriteKbytes = 0;
-        TransferIn = false;
-        ExitStatus = 0;
-        ShouldTransferFiles = "YES";
-        IsCHTCSubmit = true;
-        NumJobMatches = 1;
-        RootDir = "/";
-        JobStartDate = 1459300922;
-        JobPrio = 0;
-        CurrentHosts = 0;
-        GlobalJobId = "submit-5.chtc.wisc.edu#16798777.0#1459298672";
-        RemoteSysCpu = 6.100000000000000E+01;
-        LastRejMatchTime = 1459300921;
-        TotalSuspensions = 0;
-        CommittedSlotTime = 6.629100000000000E+04;
-        WantCheckpoint = false;
-        BlockReads = 0;
-        LastRemoteHost = "slot1_7@e375.chtc.wisc.edu";
-        TransferInput = "job697.sh";
-        LocalUserCpu = 0.0;
-        PeriodicRelease = false;
-        WinVer = 601;
-        LastPublicClaimId = "<128.105.245.175:9618>#1457031418#19008#...";
-        NumCkpts_RAW = 0;
-        Out = "job697.out";
-        SubmitEventNotes = "DAG Node: _mars_MH1B1_661.inp";
-        CumulativeSlotTime = 6.629100000000000E+04;
-        JobRunCount = 1;
-        RecentBlockReads = 0;
-        StreamErr = false;
-        DiskUsage_RAW = 6625678;
-        RequestDisk = 20971520;
-        ResidentSetSize_RAW = 597536;
-        OrigMaxHosts = 1;
-        NumCkpts = 0;
-        StatsLifetimeStarter = 66289;
-        ImageSize = 750000
-    ]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/results/classad-with-temporals.adm b/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
deleted file mode 100644
index 3cd630b..0000000
--- a/asterix-external-data/src/test/resources/results/classad-with-temporals.adm
+++ /dev/null
@@ -1 +0,0 @@
-{ "GlobalJobId": "submit-5.chtc.wisc.edu#16798777.0#1459298672", "Owner": "grandaduarte", "ClusterId": 16798777i32, "ProcId": 0i32, "RemoteWallClockTime": duration("PT18H24M51S"), "CompletionDate": datetime("2016-03-30T19:46:53.000Z"), "QDate": datetime("2016-03-30T00:44:32.000Z"), "JobCurrentStartDate": datetime("2016-03-30T01:22:02.000Z"), "JobStartDate": datetime("2016-03-30T01:22:02.000Z"), "JobCurrentStartExecutingDate": datetime("2016-03-30T01:22:04.000Z"), "StatsLifetimeStarter": 66289, "SubmitEventNotes": "DAG Node: _mars_MH1B1_661.inp", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.175", "OnExitRemove": true, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 750000, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 6625678, "EnteredCurrentStatus": 1459367213, "ResidentSe
 tSize_RAW": 597536, "RequestDisk": 20971520, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/grandaduarte/mars/mhb1/job697.sh", "CondorVersion": "$CondorVersion: 8.5.3 Mar 14 2016 BuildID: 358989 $", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "WinVer": 601, "RemoteUserCpu": 66071.0d, "BlockWrites": 3, "NiceUser": false, "Out": "job697.out", "ImageSize_RAW": 607024, "BytesSent": 8.5805472E7d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "job697.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1459367213, "ImageSize": 750000, "Schedd": "submit-5.chtc.wisc.edu", "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "job697_results.tar.gz", "BlockWriteKbytes": 24, "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "LastMatchTime": 1459300922, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 12288, "Nu
 mJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 16795779, "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "AccountingGroup": "EngrPhysics_Wilson", "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 66291, "ExecutableSize_RAW": 4, "LastRejMatchReason": "no match found ", "LastSuspensionTime": 0, "UserLog": "/home/grandaduarte/mars/mhb1/job697.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 66291.0d, "LastJobLeaseRenewal": 1459367213, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 7292.0d, "CondorPlatform": "$CondorPlatform: x86_64_RedHat6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "_mars
 _MH1B1_661.inp", "PeriodicRelease": false, "JobRunCount": 1, "LastRemoteHost": "slot1_7@e375.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 4, "RemoteSysCpu": 61.0d, "TransferInput": "job697.sh", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/grandaduarte/mars/mhb1/./dagman.dag.nodes.log", "Requirements": "( MY.JobUniverse == 12 || MY.JobUniverse == 7 || ( ( MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" ) && ( TARGET.OpSysMajorVer == MY.LinuxVer || TARGET.OpSysMajorVer == MY.LinuxVerAlt || TARGET.OpSysMajorVer == MY.WinVer ) ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "LinuxVerAlt": 6, "DiskUsage": 7500000, "LinuxVer"
 : 6, "LastRejMatchTime": 1459300921, "JobLeaseDuration": 2400, "BufferSize": 524288, "IsCHTCSubmit": true, "JobCurrentStartTransferOutputDate": 1459367212, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 66291.0d, "Environment": "", "LastPublicClaimId": "<128.105.245.175:9618>#1457031418#19008#...", "Iwd": "/home/grandaduarte/mars/mhb1", "CurrentHosts": 0, "Arguments": "", "User": "grandaduarte@chtc.wisc.edu", "StreamOut": false }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql
deleted file mode 100644
index cc46136..0000000
--- a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.1.script.aql
+++ /dev/null
@@ -1 +0,0 @@
-create_and_start.sh

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql
deleted file mode 100644
index d3317e4..0000000
--- a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.2.ddl.aql
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create a change feed with meta-data and test ingestion of records
- * Expected Res : Success
- * Date         : 24th Feb 2016
- */
-
-drop dataverse KeyVerse if exists;
-create dataverse KeyVerse;
-use dataverse KeyVerse;
-
-create type DocumentType as open{
-};
-
-create type KVMetaType as open{
-"key":string,
-bucket:string,
-vbucket:int32,
-seq:int64,
-cas:int64,
-creationTime:int64,
-expiration:int32,
-flags:int32,
-revSeq:int64,
-lockTime:int32
-};
-
-create dataset KVStore(DocumentType) with meta(KVMetaType)primary key meta()."key";
-
-create feed KVChangeStream using adapter(
-    ("type-name"="DocumentType"),
-    ("meta-type-name"="KVMetaType"),
-    ("reader"="kv_test"),
-    ("parser"="record-with-metadata"),
-    ("format"="dcp"),
-    ("record-format"="json"),
-    ("change-feed"="true"),
-    ("key-indexes"="0"),
-    ("key-indicators"="1"),
-    ("num-of-records"="1000")
-);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql
deleted file mode 100644
index 7faf013..0000000
--- a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.3.update.aql
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create a change feed with meta-data and test ingestion of records
- * Expected Res : Success
- * Date         : 24th Feb 2016
- */
-use dataverse KeyVerse;
-
-set wait-for-completion-feed "true";
-connect feed KVChangeStream to dataset KVStore;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql
deleted file mode 100644
index 3ba1dc0..0000000
--- a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.4.script.aql
+++ /dev/null
@@ -1 +0,0 @@
-stop_and_start.sh

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql
deleted file mode 100644
index 9db20a9..0000000
--- a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.query.aql
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Description  : Create a change feed and test ingestion of records
- * Expected Res : Success
- * Date         : 24th Feb 2016
- */
-use dataverse KeyVerse;
-
-count(
-    for $d in dataset KVStore
-    return $d
-);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql
deleted file mode 100644
index 10e1a51..0000000
--- a/asterix-installer/src/test/resources/transactionts/queries/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.6.script.aql
+++ /dev/null
@@ -1 +0,0 @@
-stop_and_delete.sh

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm b/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm
deleted file mode 100644
index c31da8b..0000000
--- a/asterix-installer/src/test/resources/transactionts/results/query_after_restart/dataset-with-meta-record/dataset-with-meta-record.5.adm
+++ /dev/null
@@ -1 +0,0 @@
-804
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh
deleted file mode 100755
index 945f01d..0000000
--- a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/create_and_start.sh
+++ /dev/null
@@ -1 +0,0 @@
-$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh
deleted file mode 100755
index d7deea3..0000000
--- a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_delete.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-$MANAGIX_HOME/bin/managix stop -n nc1;
-$MANAGIX_HOME/bin/managix delete -n nc1;
-

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh
deleted file mode 100755
index 1271a2b..0000000
--- a/asterix-installer/src/test/resources/transactionts/scripts/query_after_restart/dataset-with-meta-record/stop_and_start.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-$MANAGIX_HOME/bin/managix stop -n nc1;
-$MANAGIX_HOME/bin/managix start -n nc1;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java
deleted file mode 100644
index bb8c149..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineColumnAliasVisitor.java
+++ /dev/null
@@ -1,450 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.rewrites.visitor;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.base.Expression.Kind;
-import org.apache.asterix.lang.common.base.Literal;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.clause.LimitClause;
-import org.apache.asterix.lang.common.clause.OrderbyClause;
-import org.apache.asterix.lang.common.clause.WhereClause;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.FieldAccessor;
-import org.apache.asterix.lang.common.expression.FieldBinding;
-import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.IfExpr;
-import org.apache.asterix.lang.common.expression.IndexAccessor;
-import org.apache.asterix.lang.common.expression.ListConstructor;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
-import org.apache.asterix.lang.common.expression.OperatorExpr;
-import org.apache.asterix.lang.common.expression.QuantifiedExpression;
-import org.apache.asterix.lang.common.expression.RecordConstructor;
-import org.apache.asterix.lang.common.expression.UnaryExpr;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.parser.ScopeChecker;
-import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
-import org.apache.asterix.lang.common.rewrites.VariableSubstitutionEnvironment;
-import org.apache.asterix.lang.common.statement.FunctionDecl;
-import org.apache.asterix.lang.common.statement.Query;
-import org.apache.asterix.lang.common.struct.QuantifiedPair;
-import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.HavingClause;
-import org.apache.asterix.lang.sqlpp.clause.JoinClause;
-import org.apache.asterix.lang.sqlpp.clause.NestClause;
-import org.apache.asterix.lang.sqlpp.clause.Projection;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectElement;
-import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
-import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableSubstitutionUtil;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppQueryExpressionVisitor;
-
-public class InlineColumnAliasVisitor extends AbstractSqlppQueryExpressionVisitor<Void, Boolean> {
-
-    private final ScopeChecker scopeChecker = new ScopeChecker();
-    private final LangRewritingContext context;
-
-    public InlineColumnAliasVisitor(LangRewritingContext context) {
-        this.context = context;
-    }
-
-    @Override
-    public Void visit(WhereClause whereClause, Boolean arg) throws AsterixException {
-        whereClause.getWhereExpr().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(FromClause fromClause, Boolean arg) throws AsterixException {
-        for (FromTerm fromTerm : fromClause.getFromTerms()) {
-            fromTerm.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(FromTerm fromTerm, Boolean arg) throws AsterixException {
-        fromTerm.getLeftExpression().accept(this, arg);
-        // A from binding variable will override the alias to substitute.
-        scopeChecker.getCurrentScope().removeSymbolExpressionMapping(fromTerm.getLeftVariable());
-        if (fromTerm.hasPositionalVariable()) {
-            scopeChecker.getCurrentScope().removeSymbolExpressionMapping(fromTerm.getPositionalVariable());
-        }
-
-        for (AbstractBinaryCorrelateClause correlate : fromTerm.getCorrelateClauses()) {
-            correlate.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(JoinClause joinClause, Boolean arg) throws AsterixException {
-        joinClause.getRightExpression().accept(this, arg);
-        removeSubsutitions(joinClause);
-        joinClause.getConditionExpression().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(NestClause nestClause, Boolean arg) throws AsterixException {
-        nestClause.getRightExpression().accept(this, arg);
-        nestClause.getConditionExpression().accept(this, arg);
-        removeSubsutitions(nestClause);
-        return null;
-    }
-
-    @Override
-    public Void visit(UnnestClause unnestClause, Boolean arg) throws AsterixException {
-        unnestClause.getRightExpression().accept(this, arg);
-        removeSubsutitions(unnestClause);
-        return null;
-    }
-
-    @Override
-    public Void visit(Projection projection, Boolean arg) throws AsterixException {
-        projection.getExpression().accept(this, arg);
-        VariableExpr columnAlias = new VariableExpr(
-                SqlppVariableUtil.toInternalVariableIdentifier(projection.getName()));
-        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
-        Expression gbyKey = (Expression) SqlppRewriteUtil.deepCopy(env.findSubstituion(columnAlias));
-        if (arg) {
-            scopeChecker.getCurrentScope().addSymbolExpressionMappingToScope(columnAlias, projection.getExpression());
-        } else {
-            if (gbyKey != null) {
-                projection.setExpression(gbyKey);
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectBlock selectBlock, Boolean arg) throws AsterixException {
-        // Traverses the select block in the order of "select", "group-by",
-        // "group-by" lets and "having".
-        selectBlock.getSelectClause().accept(this, true);
-
-        if (selectBlock.hasFromClause()) {
-            selectBlock.getFromClause().accept(this, arg);
-        }
-        if (selectBlock.hasLetClauses()) {
-            for (LetClause letClause : selectBlock.getLetList()) {
-                letClause.accept(this, arg);
-            }
-        }
-        if (selectBlock.hasGroupbyClause()) {
-            selectBlock.getGroupbyClause().accept(this, arg);
-        }
-        if (selectBlock.hasLetClausesAfterGroupby()) {
-            for (LetClause letClauseAfterGby : selectBlock.getLetListAfterGroupby()) {
-                letClauseAfterGby.accept(this, true);
-            }
-        }
-        if (selectBlock.hasHavingClause()) {
-            selectBlock.getHavingClause().accept(this, arg);
-        }
-
-        // Visit select clause again to overwrite projection expressions if the group-by clause is rewritten.
-        selectBlock.getSelectClause().accept(this, false);
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectClause selectClause, Boolean arg) throws AsterixException {
-        if (selectClause.selectElement()) {
-            selectClause.getSelectElement().accept(this, arg);
-        }
-        if (selectClause.selectRegular()) {
-            selectClause.getSelectRegular().accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectElement selectElement, Boolean arg) throws AsterixException {
-        Expression expr = selectElement.getExpression();
-        expr.accept(this, arg);
-        if (expr.getKind() == Kind.RECORD_CONSTRUCTOR_EXPRESSION) {
-            // To be consistent with SelectRegular.
-            mapForRecordConstructor(arg, (RecordConstructor) expr);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectRegular selectRegular, Boolean arg) throws AsterixException {
-        for (Projection projection : selectRegular.getProjections()) {
-            projection.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectSetOperation selectSetOperation, Boolean arg) throws AsterixException {
-        selectSetOperation.getLeftInput().accept(this, arg);
-        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
-            right.getSetOperationRightInput().accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(SelectExpression selectExpression, Boolean arg) throws AsterixException {
-        scopeChecker.createNewScope();
-
-        // Visits let bindings.
-        if (selectExpression.hasLetClauses()) {
-            for (LetClause lc : selectExpression.getLetList()) {
-                lc.accept(this, arg);
-            }
-        }
-
-        // Visits selectSetOperation.
-        selectExpression.getSelectSetOperation().accept(this, arg);
-
-        // Visits order by.
-        if (selectExpression.hasOrderby()) {
-            selectExpression.getOrderbyClause().accept(this, arg);
-        }
-
-        // Visits limit.
-        if (selectExpression.hasLimit()) {
-            selectExpression.getLimitClause().accept(this, arg);
-        }
-
-        // Exits the scope that were entered within this select expression
-        scopeChecker.removeCurrentScope();
-        return null;
-    }
-
-    @Override
-    public Void visit(LetClause letClause, Boolean rewrite) throws AsterixException {
-        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
-        if (rewrite) {
-            Expression newBindExpr = (Expression) SqlppVariableSubstitutionUtil
-                    .substituteVariableWithoutContext(letClause.getBindingExpr(), env);
-            letClause.setBindingExpr(newBindExpr);
-        }
-        letClause.getBindingExpr().accept(this, false);
-        // A let binding variable will override the alias to substitute.
-        scopeChecker.getCurrentScope().removeSymbolExpressionMapping(letClause.getVarExpr());
-        return null;
-    }
-
-    @Override
-    public Void visit(OrderbyClause oc, Boolean arg) throws AsterixException {
-        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
-        List<Expression> orderExprs = new ArrayList<Expression>();
-        for (Expression orderExpr : oc.getOrderbyList()) {
-            orderExprs.add((Expression) SqlppVariableSubstitutionUtil.substituteVariableWithoutContext(orderExpr, env));
-            orderExpr.accept(this, arg);
-        }
-        oc.setOrderbyList(orderExprs);
-        return null;
-    }
-
-    @Override
-    public Void visit(GroupbyClause gc, Boolean arg) throws AsterixException {
-        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
-        Map<VariableExpr, VariableExpr> oldGbyExprsToNewGbyVarMap = new HashMap<>();
-        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
-            Expression oldGbyExpr = gbyVarExpr.getExpr();
-            Expression newExpr = (Expression) SqlppVariableSubstitutionUtil.substituteVariableWithoutContext(oldGbyExpr,
-                    env);
-            newExpr.accept(this, arg);
-            gbyVarExpr.setExpr(newExpr);
-            if (gbyVarExpr.getVar() == null) {
-                gbyVarExpr.setVar(new VariableExpr(context.newVariable()));
-            }
-            if (oldGbyExpr.getKind() == Kind.VARIABLE_EXPRESSION) {
-                VariableExpr oldGbyVarExpr = (VariableExpr) oldGbyExpr;
-                if (env.findSubstituion(oldGbyVarExpr) != null) {
-                    // Re-mapping that needs to be added.
-                    oldGbyExprsToNewGbyVarMap.put(oldGbyVarExpr, gbyVarExpr.getVar());
-                }
-            }
-        }
-        for (Entry<VariableExpr, VariableExpr> entry : oldGbyExprsToNewGbyVarMap.entrySet()) {
-            // The group-by key variable will override the alias to substitute.
-            scopeChecker.getCurrentScope().removeSymbolExpressionMapping(entry.getKey());
-            scopeChecker.getCurrentScope().addSymbolExpressionMappingToScope(entry.getKey(), entry.getValue());
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(LimitClause limitClause, Boolean arg) throws AsterixException {
-        limitClause.getLimitExpr().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(HavingClause havingClause, Boolean arg) throws AsterixException {
-        VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope().getVarSubstitutionEnvironment();
-        Expression newFilterExpr = (Expression) SqlppVariableSubstitutionUtil
-                .substituteVariableWithoutContext(havingClause.getFilterExpression(), env);
-        newFilterExpr.accept(this, arg);
-        havingClause.setFilterExpression(newFilterExpr);
-        return null;
-    }
-
-    @Override
-    public Void visit(Query q, Boolean arg) throws AsterixException {
-        q.getBody().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(FunctionDecl fd, Boolean arg) throws AsterixException {
-        scopeChecker.createNewScope();
-        fd.getFuncBody().accept(this, arg);
-        scopeChecker.removeCurrentScope();
-        return null;
-    }
-
-    @Override
-    public Void visit(LiteralExpr l, Boolean arg) throws AsterixException {
-        return null;
-    }
-
-    @Override
-    public Void visit(ListConstructor lc, Boolean arg) throws AsterixException {
-        for (Expression expr : lc.getExprList()) {
-            expr.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(RecordConstructor rc, Boolean rewrite) throws AsterixException {
-        for (FieldBinding binding : rc.getFbList()) {
-            binding.getLeftExpr().accept(this, false);
-            binding.getRightExpr().accept(this, false);
-        }
-        return null;
-    }
-
-    private void mapForRecordConstructor(Boolean initPhase, RecordConstructor rc) throws AsterixException {
-        for (FieldBinding binding : rc.getFbList()) {
-            Expression leftExpr = binding.getLeftExpr();
-            if (leftExpr.getKind() == Kind.LITERAL_EXPRESSION) {
-                LiteralExpr literalExpr = (LiteralExpr) leftExpr;
-                if (literalExpr.getValue().getLiteralType() == Literal.Type.STRING) {
-                    String fieldName = literalExpr.getValue().getStringValue();
-                    VariableExpr columnAlias = new VariableExpr(
-                            SqlppVariableUtil.toInternalVariableIdentifier(fieldName));
-                    VariableSubstitutionEnvironment env = scopeChecker.getCurrentScope()
-                            .getVarSubstitutionEnvironment();
-                    if (initPhase) {
-                        scopeChecker.getCurrentScope().addSymbolExpressionMappingToScope(columnAlias,
-                                binding.getRightExpr());
-                    } else {
-                        Expression gbyKey = (Expression) SqlppRewriteUtil.deepCopy(env.findSubstituion(columnAlias));
-                        if (gbyKey != null) {
-                            binding.setRightExpr(gbyKey);
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    @Override
-    public Void visit(OperatorExpr operatorExpr, Boolean arg) throws AsterixException {
-        for (Expression expr : operatorExpr.getExprList()) {
-            expr.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(IfExpr ifExpr, Boolean arg) throws AsterixException {
-        ifExpr.getCondExpr().accept(this, arg);
-        ifExpr.getThenExpr().accept(this, arg);
-        ifExpr.getElseExpr().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(QuantifiedExpression qe, Boolean arg) throws AsterixException {
-        for (QuantifiedPair pair : qe.getQuantifiedList()) {
-            pair.getExpr().accept(this, arg);
-        }
-        qe.getSatisfiesExpr().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(CallExpr callExpr, Boolean arg) throws AsterixException {
-        for (Expression expr : callExpr.getExprList()) {
-            expr.accept(this, arg);
-        }
-        return null;
-    }
-
-    @Override
-    public Void visit(VariableExpr varExpr, Boolean arg) throws AsterixException {
-        return null;
-    }
-
-    @Override
-    public Void visit(UnaryExpr u, Boolean arg) throws AsterixException {
-        u.getExpr().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(FieldAccessor fa, Boolean arg) throws AsterixException {
-        fa.getExpr().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Void visit(IndexAccessor ia, Boolean arg) throws AsterixException {
-        ia.getExpr().accept(this, arg);
-        Expression indexExpr = ia.getExpr();
-        if (indexExpr != null) {
-            indexExpr.accept(this, arg);
-        }
-        return null;
-    }
-
-    private void removeSubsutitions(AbstractBinaryCorrelateClause unnestClause) {
-        scopeChecker.getCurrentScope().removeSymbolExpressionMapping(unnestClause.getRightVariable());
-        if (unnestClause.hasPositionalVariable()) {
-            scopeChecker.getCurrentScope().removeSymbolExpressionMapping(unnestClause.getPositionalVariable());
-        }
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java
deleted file mode 100644
index c7c7d11..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppBuiltinFunctionRewriteVisitor.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.rewrites.visitor;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppSimpleExpressionVisitor;
-
-public class SqlppBuiltinFunctionRewriteVisitor extends AbstractSqlppSimpleExpressionVisitor {
-
-    @Override
-    public Expression visit(CallExpr callExpr, Expression arg) throws AsterixException {
-        //TODO(buyingyi): rewrite SQL temporal functions
-        FunctionSignature functionSignature = callExpr.getFunctionSignature();
-        callExpr.setFunctionSignature(FunctionMapUtil.normalizeBuiltinFunctionSignature(functionSignature, true));
-        List<Expression> newExprList = new ArrayList<Expression>();
-        for (Expression expr : callExpr.getExprList()) {
-            newExprList.add(expr.accept(this, arg));
-        }
-        callExpr.setExprList(newExprList);
-        return callExpr;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java
deleted file mode 100644
index ae629af..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGlobalAggregationSugarVisitor.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.rewrites.visitor;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.base.ILangExpression;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.literal.IntegerLiteral;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.visitor.CheckSql92AggregateVisitor;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppSimpleExpressionVisitor;
-
-public class SqlppGlobalAggregationSugarVisitor extends AbstractSqlppSimpleExpressionVisitor {
-
-    @Override
-    public Expression visit(SelectBlock selectBlock, Expression arg) throws AsterixException {
-        SelectClause selectClause = selectBlock.getSelectClause();
-        if (!selectBlock.hasGroupbyClause() && selectBlock.hasFromClause()) {
-            boolean addImplicitGby = false;
-            if (selectClause.selectRegular()) {
-                addImplicitGby = isSql92Aggregate(selectClause.getSelectRegular(), selectBlock);
-            } else {
-                addImplicitGby = isSql92Aggregate(selectClause.getSelectElement(), selectBlock);
-            }
-            if (addImplicitGby) {
-                // Adds an implicit group-by clause for SQL-92 global aggregate.
-                List<GbyVariableExpressionPair> gbyPairList = new ArrayList<>();
-                gbyPairList.add(new GbyVariableExpressionPair(null, new LiteralExpr(new IntegerLiteral(1))));
-                List<GbyVariableExpressionPair> decorPairList = new ArrayList<>();
-                List<VariableExpr> withVarList = new ArrayList<>();
-                GroupbyClause gbyClause = new GroupbyClause(gbyPairList, decorPairList, withVarList, null, null, false,
-                        true);
-                selectBlock.setGroupbyClause(gbyClause);
-            }
-        }
-        return super.visit(selectBlock, arg);
-    }
-
-    private boolean isSql92Aggregate(ILangExpression expr, SelectBlock selectBlock) throws AsterixException {
-        CheckSql92AggregateVisitor visitor = new CheckSql92AggregateVisitor();
-        return expr.accept(visitor, selectBlock);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java
deleted file mode 100644
index ae47264..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupBySugarVisitor.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.rewrites.visitor;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.base.Expression.Kind;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.FieldAccessor;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectElement;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationInput;
-import org.apache.asterix.lang.sqlpp.util.FunctionMapUtil;
-import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableSubstitutionUtil;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
-
-/**
- * An AST pre-processor to rewrite group-by sugar queries.
- */
-public class SqlppGroupBySugarVisitor extends AbstractSqlppExpressionScopingVisitor {
-
-    private final Expression groupVar;
-    private final Collection<VariableExpr> targetVars;
-
-    public SqlppGroupBySugarVisitor(LangRewritingContext context, Expression groupVar,
-            Collection<VariableExpr> targetVars) {
-        super(context);
-        this.groupVar = groupVar;
-        this.targetVars = targetVars;
-    }
-
-    @Override
-    public Expression visit(CallExpr callExpr, Expression arg) throws AsterixException {
-        List<Expression> newExprList = new ArrayList<Expression>();
-        FunctionSignature signature = callExpr.getFunctionSignature();
-        boolean aggregate = FunctionMapUtil.isSql92AggregateFunction(signature)
-                || FunctionMapUtil.isCoreAggregateFunction(signature);
-        boolean rewritten = false;
-        for (Expression expr : callExpr.getExprList()) {
-            Expression newExpr = aggregate ? wrapAggregationArgument(expr) : expr;
-            rewritten |= newExpr != expr;
-            newExprList.add(newExpr.accept(this, arg));
-        }
-        if (rewritten) {
-            // Rewrites the SQL-92 function name to core functions.
-            callExpr.setFunctionSignature(FunctionMapUtil.sql92ToCoreAggregateFunction(signature));
-        }
-        callExpr.setExprList(newExprList);
-        return callExpr;
-    }
-
-    private Expression wrapAggregationArgument(Expression expr) throws AsterixException {
-        if (expr.getKind() == Kind.SELECT_EXPRESSION) {
-            return expr;
-        }
-        Set<VariableExpr> definedVars = scopeChecker.getCurrentScope().getLiveVariables();
-        Set<VariableExpr> vars = new HashSet<>(targetVars);
-        vars.remove(definedVars); // Exclude re-defined local variables.
-        Set<VariableExpr> freeVars = SqlppRewriteUtil.getFreeVariable(expr);
-        if (!vars.containsAll(freeVars)) {
-            return expr;
-        }
-
-        VariableExpr var = new VariableExpr(context.newVariable());
-        FromTerm fromTerm = new FromTerm(groupVar, var, null, null);
-        FromClause fromClause = new FromClause(Collections.singletonList(fromTerm));
-
-        // Select clause.
-        SelectElement selectElement = new SelectElement(expr);
-        SelectClause selectClause = new SelectClause(selectElement, null, false);
-
-        // Construct the select expression.
-        SelectBlock selectBlock = new SelectBlock(selectClause, fromClause, null, null, null, null, null);
-        SelectSetOperation selectSetOperation = new SelectSetOperation(new SetOperationInput(selectBlock, null), null);
-        SelectExpression selectExpression = new SelectExpression(null, selectSetOperation, null, null, false);
-        selectExpression.setSubquery(true);
-
-        // replace variable expressions with field access
-        Map<VariableExpr, Expression> varExprMap = new HashMap<>();
-        for (VariableExpr usedVar : freeVars) {
-            varExprMap.put(usedVar,
-                    new FieldAccessor(var, SqlppVariableUtil.toUserDefinedVariableName(usedVar.getVar())));
-        }
-        selectElement.setExpression(
-                (Expression) SqlppVariableSubstitutionUtil.substituteVariableWithoutContext(expr, varExprMap));
-        return selectExpression;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java
deleted file mode 100644
index c9e7a6e..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByVisitor.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.rewrites.visitor;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.clause.GroupbyClause;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.context.Scope;
-import org.apache.asterix.lang.common.expression.GbyVariableExpressionPair;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.lang.common.struct.VarIdentifier;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.util.SqlppRewriteUtil;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-
-/**
- * A pre-processor that adds the group variable as well as its group field
- * list into the AST. It will also invoke SQL group-by aggregation sugar rewritings.
- */
-public class SqlppGroupByVisitor extends AbstractSqlppExpressionScopingVisitor {
-
-    public SqlppGroupByVisitor(LangRewritingContext context) {
-        super(context);
-    }
-
-    @Override
-    public Expression visit(SelectBlock selectBlock, Expression arg) throws AsterixException {
-        // Traverses the select block in the order of "from", "let"s, "where",
-        // "group by", "let"s, "having" and "select".
-        if (selectBlock.hasFromClause()) {
-            selectBlock.getFromClause().accept(this, arg);
-        }
-        if (selectBlock.hasLetClauses()) {
-            List<LetClause> letList = selectBlock.getLetList();
-            for (LetClause letClause : letList) {
-                letClause.accept(this, arg);
-            }
-        }
-        if (selectBlock.hasWhereClause()) {
-            selectBlock.getWhereClause().accept(this, arg);
-        }
-        if (selectBlock.hasGroupbyClause()) {
-            selectBlock.getGroupbyClause().accept(this, arg);
-            Set<VariableExpr> withVarSet = new HashSet<>(selectBlock.getGroupbyClause().getWithVarList());
-            withVarSet.remove(selectBlock.getGroupbyClause().getGroupVar());
-            if (selectBlock.hasLetClausesAfterGroupby()) {
-                List<LetClause> letListAfterGby = selectBlock.getLetListAfterGroupby();
-                for (LetClause letClauseAfterGby : letListAfterGby) {
-                    // Rewrites each let clause after the group-by.
-                    SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
-                            withVarSet, letClauseAfterGby, context);
-                    letClauseAfterGby.accept(this, arg);
-                }
-            }
-            if (selectBlock.hasHavingClause()) {
-                // Rewrites the having clause.
-                SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
-                        withVarSet, selectBlock.getHavingClause(), context);
-                selectBlock.getHavingClause().accept(this, arg);
-            }
-            // Rewrites the select clause.
-            SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
-                    withVarSet, selectBlock.getSelectClause(), context);
-
-            SelectExpression parentSelectExpression = (SelectExpression) arg;
-            if (parentSelectExpression.hasOrderby()) {
-                // Rewrites the order-by clause.
-                SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
-                        withVarSet, parentSelectExpression.getOrderbyClause(), context);
-            }
-            if (parentSelectExpression.hasLimit()) {
-                // Rewrites the limit clause.
-                SqlppRewriteUtil.rewriteExpressionUsingGroupVariable(selectBlock.getGroupbyClause().getGroupVar(),
-                        withVarSet, parentSelectExpression.getLimitClause(), context);
-            }
-        }
-        selectBlock.getSelectClause().accept(this, arg);
-        return null;
-    }
-
-    @Override
-    public Expression visit(GroupbyClause gc, Expression arg) throws AsterixException {
-        Scope newScope = scopeChecker.extendCurrentScopeNoPush(true);
-        // Puts all group-by variables into the symbol set of the new scope.
-        for (GbyVariableExpressionPair gbyVarExpr : gc.getGbyPairList()) {
-            gbyVarExpr.setExpr(gbyVarExpr.getExpr().accept(this, arg));
-            VariableExpr gbyVar = gbyVarExpr.getVar();
-            if (gbyVar != null) {
-                newScope.addNewVarSymbolToScope(gbyVarExpr.getVar().getVar());
-            }
-        }
-        // Puts all live variables into withVarList.
-        List<VariableExpr> withVarList = new ArrayList<VariableExpr>();
-        Iterator<Identifier> varIterator = scopeChecker.getCurrentScope().liveSymbols();
-        while (varIterator.hasNext()) {
-            Identifier ident = varIterator.next();
-            VariableExpr varExpr = new VariableExpr();
-            if (ident instanceof VarIdentifier) {
-                varExpr.setIsNewVar(false);
-                varExpr.setVar((VarIdentifier) ident);
-                withVarList.add(varExpr);
-                newScope.addNewVarSymbolToScope((VarIdentifier) ident);
-            }
-        }
-
-        // Sets the field list for the group variable.
-        List<Pair<Expression, Identifier>> groupFieldList = new ArrayList<>();
-        if (!gc.hasGroupFieldList()) {
-            for (VariableExpr varExpr : withVarList) {
-                Pair<Expression, Identifier> varIdPair = new Pair<>(new VariableExpr(varExpr.getVar()),
-                        SqlppVariableUtil.toUserDefinedVariableName(varExpr.getVar()));
-                groupFieldList.add(varIdPair);
-            }
-            gc.setGroupFieldList(groupFieldList);
-        } else {
-            // Check the scopes of group field variables.
-            for (Pair<Expression, Identifier> groupField : gc.getGroupFieldList()) {
-                Expression newVar = groupField.first.accept(this, arg);
-                groupFieldList.add(new Pair<>(newVar, groupField.second));
-            }
-        }
-        gc.setGroupFieldList(groupFieldList);
-
-        // Sets the group variable.
-        if (!gc.hasGroupVar()) {
-            VariableExpr groupVar = new VariableExpr(context.newVariable());
-            gc.setGroupVar(groupVar);
-        }
-        newScope.addNewVarSymbolToScope(gc.getGroupVar().getVar());
-
-        // Adds the group variable into the "with" (i.e., re-binding) variable list.
-        VariableExpr gbyVarRef = new VariableExpr(gc.getGroupVar().getVar());
-        gbyVarRef.setIsNewVar(false);
-        withVarList.add(gbyVarRef);
-        gc.setWithVarList(withVarList);
-
-        scopeChecker.replaceCurrentScope(newScope);
-        return null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
deleted file mode 100644
index e7832bb..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.rewrites.visitor;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.base.IRewriterFactory;
-import org.apache.asterix.lang.common.clause.LetClause;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
-import org.apache.asterix.lang.common.statement.FunctionDecl;
-import org.apache.asterix.lang.common.visitor.AbstractInlineUdfsVisitor;
-import org.apache.asterix.lang.sqlpp.clause.AbstractBinaryCorrelateClause;
-import org.apache.asterix.lang.sqlpp.clause.FromClause;
-import org.apache.asterix.lang.sqlpp.clause.FromTerm;
-import org.apache.asterix.lang.sqlpp.clause.HavingClause;
-import org.apache.asterix.lang.sqlpp.clause.JoinClause;
-import org.apache.asterix.lang.sqlpp.clause.NestClause;
-import org.apache.asterix.lang.sqlpp.clause.Projection;
-import org.apache.asterix.lang.sqlpp.clause.SelectBlock;
-import org.apache.asterix.lang.sqlpp.clause.SelectClause;
-import org.apache.asterix.lang.sqlpp.clause.SelectElement;
-import org.apache.asterix.lang.sqlpp.clause.SelectRegular;
-import org.apache.asterix.lang.sqlpp.clause.SelectSetOperation;
-import org.apache.asterix.lang.sqlpp.clause.UnnestClause;
-import org.apache.asterix.lang.sqlpp.expression.SelectExpression;
-import org.apache.asterix.lang.sqlpp.struct.SetOperationRight;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableSubstitutionUtil;
-import org.apache.asterix.lang.sqlpp.visitor.SqlppCloneAndSubstituteVariablesVisitor;
-import org.apache.asterix.lang.sqlpp.visitor.base.ISqlppVisitor;
-import org.apache.asterix.metadata.declared.AqlMetadataProvider;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-
-public class SqlppInlineUdfsVisitor extends AbstractInlineUdfsVisitor
-        implements ISqlppVisitor<Boolean, List<FunctionDecl>> {
-
-    /**
-     * @param context,
-     *            manages ids of variables and guarantees uniqueness of variables.
-     * @param rewriterFactory,
-     *            a rewrite factory for rewriting user-defined functions.
-     * @param declaredFunctions,
-     *            a list of declared functions associated with the query.
-     * @param metadataProvider,
-     *            providing the definition of created (i.e., stored) user-defined functions.
-     */
-    public SqlppInlineUdfsVisitor(LangRewritingContext context, IRewriterFactory rewriterFactory,
-            List<FunctionDecl> declaredFunctions, AqlMetadataProvider metadataProvider) {
-        super(context, rewriterFactory, declaredFunctions, metadataProvider,
-                new SqlppCloneAndSubstituteVariablesVisitor(context));
-    }
-
-    @Override
-    protected Expression generateQueryExpression(List<LetClause> letClauses, Expression returnExpr)
-            throws AsterixException {
-        Map<VariableExpr, Expression> varExprMap = extractLetBindingVariableExpressionMappings(letClauses);
-        Expression inlinedReturnExpr = (Expression) SqlppVariableSubstitutionUtil
-                .substituteVariableWithoutContext(returnExpr, varExprMap);
-        return inlinedReturnExpr;
-    }
-
-    @Override
-    public Boolean visit(FromClause fromClause, List<FunctionDecl> func) throws AsterixException {
-        boolean changed = false;
-        for (FromTerm fromTerm : fromClause.getFromTerms()) {
-            changed |= fromTerm.accept(this, func);
-        }
-        return changed;
-    }
-
-    @Override
-    public Boolean visit(FromTerm fromTerm, List<FunctionDecl> func) throws AsterixException {
-        boolean changed = false;
-        Pair<Boolean, Expression> p = inlineUdfsInExpr(fromTerm.getLeftExpression(), func);
-        fromTerm.setLeftExpression(p.second);
-        changed |= p.first;
-        for (AbstractBinaryCorrelateClause correlateClause : fromTerm.getCorrelateClauses()) {
-            changed |= correlateClause.accept(this, func);
-        }
-        return changed;
-    }
-
-    @Override
-    public Boolean visit(JoinClause joinClause, List<FunctionDecl> funcs) throws AsterixException {
-        Pair<Boolean, Expression> p1 = inlineUdfsInExpr(joinClause.getRightExpression(), funcs);
-        joinClause.setRightExpression(p1.second);
-        Pair<Boolean, Expression> p2 = inlineUdfsInExpr(joinClause.getConditionExpression(), funcs);
-        joinClause.setConditionExpression(p2.second);
-        return p1.first || p2.first;
-    }
-
-    @Override
-    public Boolean visit(NestClause nestClause, List<FunctionDecl> funcs) throws AsterixException {
-        Pair<Boolean, Expression> p1 = inlineUdfsInExpr(nestClause.getRightExpression(), funcs);
-        nestClause.setRightExpression(p1.second);
-        Pair<Boolean, Expression> p2 = inlineUdfsInExpr(nestClause.getConditionExpression(), funcs);
-        nestClause.setConditionExpression(p2.second);
-        return p1.first || p2.first;
-    }
-
-    @Override
-    public Boolean visit(Projection projection, List<FunctionDecl> funcs) throws AsterixException {
-        Pair<Boolean, Expression> p = inlineUdfsInExpr(projection.getExpression(), funcs);
-        projection.setExpression(p.second);
-        return p.first;
-    }
-
-    @Override
-    public Boolean visit(SelectBlock selectBlock, List<FunctionDecl> funcs) throws AsterixException {
-        boolean changed = false;
-        if (selectBlock.hasFromClause()) {
-            changed |= selectBlock.getFromClause().accept(this, funcs);
-        }
-        if (selectBlock.hasLetClauses()) {
-            for (LetClause letClause : selectBlock.getLetList()) {
-                changed |= letClause.accept(this, funcs);
-            }
-        }
-        if (selectBlock.hasWhereClause()) {
-            changed |= selectBlock.getWhereClause().accept(this, funcs);
-        }
-        if (selectBlock.hasGroupbyClause()) {
-            changed |= selectBlock.getGroupbyClause().accept(this, funcs);
-        }
-        if (selectBlock.hasLetClausesAfterGroupby()) {
-            for (LetClause letClause : selectBlock.getLetListAfterGroupby()) {
-                changed |= letClause.accept(this, funcs);
-            }
-        }
-        if (selectBlock.hasHavingClause()) {
-            changed |= selectBlock.getHavingClause().accept(this, funcs);
-        }
-        changed |= selectBlock.getSelectClause().accept(this, funcs);
-        return changed;
-    }
-
-    @Override
-    public Boolean visit(SelectClause selectClause, List<FunctionDecl> funcs) throws AsterixException {
-        boolean changed = false;
-        if (selectClause.selectElement()) {
-            changed |= selectClause.getSelectElement().accept(this, funcs);
-        } else {
-            changed |= selectClause.getSelectRegular().accept(this, funcs);
-        }
-        return changed;
-    }
-
-    @Override
-    public Boolean visit(SelectElement selectElement, List<FunctionDecl> funcs) throws AsterixException {
-        Pair<Boolean, Expression> p = inlineUdfsInExpr(selectElement.getExpression(), funcs);
-        selectElement.setExpression(p.second);
-        return p.first;
-    }
-
-    @Override
-    public Boolean visit(SelectRegular selectRegular, List<FunctionDecl> funcs) throws AsterixException {
-        boolean changed = false;
-        for (Projection projection : selectRegular.getProjections()) {
-            changed |= projection.accept(this, funcs);
-        }
-        return changed;
-    }
-
-    @Override
-    public Boolean visit(SelectSetOperation selectSetOperation, List<FunctionDecl> funcs) throws AsterixException {
-        boolean changed = false;
-        changed |= selectSetOperation.getLeftInput().accept(this, funcs);
-        for (SetOperationRight right : selectSetOperation.getRightInputs()) {
-            changed |= right.getSetOperationRightInput().accept(this, funcs);
-        }
-        return changed;
-    }
-
-    @Override
-    public Boolean visit(SelectExpression selectExpression, List<FunctionDecl> funcs) throws AsterixException {
-        boolean changed = false;
-        if (selectExpression.hasLetClauses()) {
-            for (LetClause letClause : selectExpression.getLetList()) {
-                changed |= letClause.accept(this, funcs);
-            }
-        }
-        changed |= selectExpression.getSelectSetOperation().accept(this, funcs);
-        if (selectExpression.hasOrderby()) {
-            changed |= selectExpression.getOrderbyClause().accept(this, funcs);
-        }
-        if (selectExpression.hasLimit()) {
-            changed |= selectExpression.getLimitClause().accept(this, funcs);
-        }
-        return changed;
-    }
-
-    @Override
-    public Boolean visit(UnnestClause unnestClause, List<FunctionDecl> funcs) throws AsterixException {
-        Pair<Boolean, Expression> p = inlineUdfsInExpr(unnestClause.getRightExpression(), funcs);
-        unnestClause.setRightExpression(p.second);
-        return p.first;
-    }
-
-    @Override
-    public Boolean visit(HavingClause havingClause, List<FunctionDecl> funcs) throws AsterixException {
-        Pair<Boolean, Expression> p = inlineUdfsInExpr(havingClause.getFilterExpression(), funcs);
-        havingClause.setFilterExpression(p.second);
-        return p.first;
-    }
-
-    private Map<VariableExpr, Expression> extractLetBindingVariableExpressionMappings(List<LetClause> letClauses)
-            throws AsterixException {
-        Map<VariableExpr, Expression> varExprMap = new HashMap<VariableExpr, Expression>();
-        for (LetClause lc : letClauses) {
-            // inline let variables one by one iteratively.
-            lc.setBindingExpr((Expression) SqlppVariableSubstitutionUtil
-                    .substituteVariableWithoutContext(lc.getBindingExpr(), varExprMap));
-            varExprMap.put(lc.getVarExpr(), lc.getBindingExpr());
-        }
-        return varExprMap;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
deleted file mode 100644
index 5ca2533..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.rewrites.visitor;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.asterix.common.config.MetadataConstants;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.lang.common.base.Expression;
-import org.apache.asterix.lang.common.expression.CallExpr;
-import org.apache.asterix.lang.common.expression.LiteralExpr;
-import org.apache.asterix.lang.common.expression.VariableExpr;
-import org.apache.asterix.lang.common.literal.StringLiteral;
-import org.apache.asterix.lang.common.rewrites.LangRewritingContext;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.lang.common.struct.VarIdentifier;
-import org.apache.asterix.lang.sqlpp.util.SqlppVariableUtil;
-import org.apache.asterix.lang.sqlpp.visitor.base.AbstractSqlppExpressionScopingVisitor;
-import org.apache.asterix.metadata.declared.AqlMetadataProvider;
-
-public class VariableCheckAndRewriteVisitor extends AbstractSqlppExpressionScopingVisitor {
-
-    protected final boolean overwrite;
-    protected final AqlMetadataProvider metadataProvider;
-
-    /**
-     * @param context,
-     *            manages ids of variables and guarantees uniqueness of variables.
-     * @param overwrite,
-     *            whether rewrite unbounded variables to dataset function calls.
-     *            This flag can only be true for rewriting a top-level query.
-     *            It should be false for rewriting the body expression of a user-defined function.
-     */
-    public VariableCheckAndRewriteVisitor(LangRewritingContext context, boolean overwrite,
-            AqlMetadataProvider metadataProvider) {
-        super(context);
-        this.overwrite = overwrite;
-        this.metadataProvider = metadataProvider;
-    }
-
-    @Override
-    public Expression visit(VariableExpr varExpr, Expression arg) throws AsterixException {
-        String varName = varExpr.getVar().getValue();
-        if (scopeChecker.isInForbiddenScopes(varName)) {
-            throw new AsterixException(
-                    "Inside limit clauses, it is disallowed to reference a variable having the same name as any variable bound in the same scope as the limit clause.");
-        }
-        if (rewriteNeeded(varExpr)) {
-            return datasetRewrite(varExpr);
-        } else {
-            return varExpr;
-        }
-    }
-
-    // Whether a rewrite is needed for a variable reference expression.
-    private boolean rewriteNeeded(VariableExpr varExpr) throws AsterixException {
-        String varName = varExpr.getVar().getValue();
-        Identifier ident = scopeChecker.lookupSymbol(varName);
-        if (ident != null) {
-            // Exists such an identifier
-            varExpr.setIsNewVar(false);
-            varExpr.setVar((VarIdentifier) ident);
-            return false;
-        } else {
-            // Meets a undefined variable
-            return true;
-        }
-    }
-
-    // Rewrites for global variable (e.g., dataset) references.
-    private Expression datasetRewrite(VariableExpr expr) throws AsterixException {
-        if (!overwrite) {
-            return expr;
-        }
-        String funcName = "dataset";
-        String dataverse = MetadataConstants.METADATA_DATAVERSE_NAME;
-        FunctionSignature signature = new FunctionSignature(dataverse, funcName, 1);
-        List<Expression> argList = new ArrayList<Expression>();
-        //Ignore the parser-generated prefix "$" for a dataset.
-        String dataset = SqlppVariableUtil.toUserDefinedVariableName(expr.getVar()).getValue();
-        argList.add(new LiteralExpr(new StringLiteral(dataset)));
-        return new CallExpr(signature, argList);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
----------------------------------------------------------------------
diff --git a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java b/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
deleted file mode 100644
index cbf05b5..0000000
--- a/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.lang.sqlpp.util;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.functions.FunctionConstants;
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.lang.common.util.FunctionUtil;
-import org.apache.asterix.om.functions.AsterixBuiltinFunctions;
-import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-
-public class FunctionMapUtil {
-
-    private final static String CORE_AGGREGATE_PREFIX = "coll_";
-
-    // Maps from a SQL function name to an AQL function name (i.e., AsterixDB internal name).
-    private static final Map<String, String> FUNCTION_NAME_MAP = new HashMap<>();
-
-    static {
-        FUNCTION_NAME_MAP.put("ceil", "ceiling"); //SQL: ceil,  AQL: ceiling
-        FUNCTION_NAME_MAP.put("length", "string-length"); // SQL: length,  AQL: string-length
-        FUNCTION_NAME_MAP.put("lower", "lowercase"); // SQL: lower, AQL: lowercase
-        FUNCTION_NAME_MAP.put("substr", "substring"); // SQL: substr,  AQL: substring
-        FUNCTION_NAME_MAP.put("upper", "uppercase"); //SQL: upper, AQL: uppercase
-    }
-
-    /**
-     * Whether a function signature is a SQL-92 core aggregate function.
-     *
-     * @param fs,
-     *            the function signature.
-     * @return true if the function signature is a SQL-92 core aggregate,
-     *         false otherwise.
-     */
-    public static boolean isSql92AggregateFunction(FunctionSignature signature) throws AsterixException {
-        IFunctionInfo finfo = FunctionUtil.getFunctionInfo(new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
-                signature.getName().toLowerCase(), signature.getArity()));
-        if (finfo == null) {
-            return false;
-        }
-        return AsterixBuiltinFunctions.getAggregateFunction(finfo.getFunctionIdentifier()) != null;
-    }
-
-    /**
-     * Whether a function signature is a SQL++ core aggregate function.
-     *
-     * @param fs,
-     *            the function signature.
-     * @return true if the function signature is a SQL++ core aggregate,
-     *         false otherwise.
-     */
-    public static boolean isCoreAggregateFunction(FunctionSignature fs) {
-        String name = fs.getName().toLowerCase();
-        if (!name.startsWith(CORE_AGGREGATE_PREFIX)) {
-            return false;
-        }
-        IFunctionInfo finfo = FunctionUtil.getFunctionInfo(new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
-                name.substring(CORE_AGGREGATE_PREFIX.length()), fs.getArity()));
-        if (finfo == null) {
-            return false;
-        }
-        return AsterixBuiltinFunctions.getAggregateFunction(finfo.getFunctionIdentifier()) != null;
-    }
-
-    /**
-     * Get the corresponding SQL++ core aggregate function from the SQL-92 aggregate function.
-     *
-     * @param fs,
-     *            the SQL-92 aggregate function signature.
-     * @return the SQL++ aggregate function signature.
-     * @throws AsterixException
-     */
-    public static FunctionSignature sql92ToCoreAggregateFunction(FunctionSignature fs) throws AsterixException {
-        if (!isSql92AggregateFunction(fs)) {
-            return fs;
-        }
-        return new FunctionSignature(fs.getNamespace(), CORE_AGGREGATE_PREFIX + fs.getName(), fs.getArity());
-    }
-
-    /**
-     * Maps a user invoked function signature to a system internal function signature.
-     *
-     * @param fs,
-     *            the user typed function.
-     * @return the system internal function.
-     */
-    public static FunctionSignature normalizeBuiltinFunctionSignature(FunctionSignature fs, boolean checkSql92Aggregate)
-            throws AsterixException {
-        String mappedName = internalizeBuiltinScalarFunctionName(fs.getName());
-        if (isCoreAggregateFunction(fs)) {
-            mappedName = internalizeCoreAggregateFunctionName(mappedName);
-        } else if (checkSql92Aggregate && isSql92AggregateFunction(fs)) {
-            throw new AsterixException(fs.getName()
-                    + " is a SQL-92 aggregate function. The SQL++ core aggregate function " + CORE_AGGREGATE_PREFIX
-                    + fs.getName().toLowerCase() + " could potentially express the intent.");
-        }
-        return new FunctionSignature(fs.getNamespace(), mappedName, fs.getArity());
-    }
-
-    /**
-     * Removes the "coll_" prefix for user-facing SQL++ core aggregate function names.
-     *
-     * @param name,
-     *            the name of a user-facing SQL++ core aggregate function name.
-     * @return the AsterixDB internal function name for the aggregate function.
-     * @throws AsterixException
-     */
-    private static String internalizeCoreAggregateFunctionName(String name) throws AsterixException {
-        String lowerCaseName = name.toLowerCase();
-        return lowerCaseName.substring(CORE_AGGREGATE_PREFIX.length());
-    }
-
-    /**
-     * Note: function name normalization can ONLY be called
-     * after all user-defined functions (by either "DECLARE FUNCTION" or "CREATE FUNCTION")
-     * are inlined, because user-defined function names are case-sensitive.
-     *
-     * @param name
-     *            the user-input function name in the query.
-     * @return the mapped internal name.
-     */
-    private static String internalizeBuiltinScalarFunctionName(String name) {
-        String lowerCaseName = name.toLowerCase();
-        String mappedName = FUNCTION_NAME_MAP.get(lowerCaseName);
-        if (mappedName != null) {
-            return mappedName;
-        }
-        return lowerCaseName;
-    }
-}



[05/50] [abbrv] incubator-asterixdb git commit: ASTERIXDB-1377: Prevent Operations from Exiting Components Multiple Times

Posted by im...@apache.org.
ASTERIXDB-1377: Prevent Operations from Exiting Components Multiple Times

Change-Id: I5907b1b4c76ce48366f5447f2940f3561c474cfe
Reviewed-on: https://asterix-gerrit.ics.uci.edu/769
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: Yingyi Bu <bu...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/c820f2cd
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/c820f2cd
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/c820f2cd

Branch: refs/heads/master
Commit: c820f2cd08df352cc1e7336e9b53e5a38cfcb6a3
Parents: 0661036
Author: Murtadha Hubail <mh...@uci.edu>
Authored: Fri Apr 1 05:23:25 2016 -0700
Committer: Murtadha Hubail <hu...@gmail.com>
Committed: Mon Apr 4 21:37:05 2016 -0700

----------------------------------------------------------------------
 .../lsm/btree/impls/ExternalBTreeOpContext.java |  5 ++-
 .../impls/ExternalBTreeWithBuddyOpContext.java  |  5 ++-
 .../am/lsm/btree/impls/LSMBTreeOpContext.java   |  5 ++-
 .../common/api/ILSMIndexOperationContext.java   |  7 ++++
 .../impls/AbstractLSMIndexOperationContext.java | 41 ++++++++++++++++++++
 .../lsm/common/impls/ExternalIndexHarness.java  | 27 +++++++++----
 .../storage/am/lsm/common/impls/LSMHarness.java | 17 ++++++++
 .../lsm/common/impls/LSMIndexSearchCursor.java  |  4 +-
 .../impls/LSMInvertedIndexOpContext.java        |  5 ++-
 .../lsm/rtree/impls/ExternalRTreeOpContext.java |  5 ++-
 .../am/lsm/rtree/impls/LSMRTreeOpContext.java   |  5 ++-
 11 files changed, 105 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
index d63671e..29fedef 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeOpContext.java
@@ -31,9 +31,9 @@ import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 
-public class ExternalBTreeOpContext implements ILSMIndexOperationContext {
+public class ExternalBTreeOpContext extends AbstractLSMIndexOperationContext {
     public ITreeIndexFrameFactory insertLeafFrameFactory;
     public ITreeIndexFrameFactory deleteLeafFrameFactory;
     public IBTreeLeafFrame insertLeafFrame;
@@ -86,6 +86,7 @@ public class ExternalBTreeOpContext implements ILSMIndexOperationContext {
 
     @Override
     public void reset() {
+        super.reset();
         componentHolder.clear();
         componentsToBeMerged.clear();
         componentsToBeReplicated.clear();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
index a837301..c44f529 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/ExternalBTreeWithBuddyOpContext.java
@@ -31,9 +31,9 @@ import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 
-public class ExternalBTreeWithBuddyOpContext implements ILSMIndexOperationContext {
+public class ExternalBTreeWithBuddyOpContext extends AbstractLSMIndexOperationContext {
     private IndexOperation op;
     private MultiComparator bTreeCmp;
     private MultiComparator buddyBTreeCmp;
@@ -74,6 +74,7 @@ public class ExternalBTreeWithBuddyOpContext implements ILSMIndexOperationContex
 
     @Override
     public void reset() {
+        super.reset();
         componentHolder.clear();
         componentsToBeMerged.clear();
         componentsToBeReplicated.clear();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
index e833283..31c9d40 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTreeOpContext.java
@@ -38,9 +38,9 @@ import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 
-public final class LSMBTreeOpContext implements ILSMIndexOperationContext {
+public final class LSMBTreeOpContext extends AbstractLSMIndexOperationContext {
 
     public ITreeIndexFrameFactory insertLeafFrameFactory;
     public ITreeIndexFrameFactory deleteLeafFrameFactory;
@@ -145,6 +145,7 @@ public final class LSMBTreeOpContext implements ILSMIndexOperationContext {
 
     @Override
     public void reset() {
+        super.reset();
         componentHolder.clear();
         componentsToBeMerged.clear();
         componentsToBeReplicated.clear();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
index 99f981d..acf2233 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/api/ILSMIndexOperationContext.java
@@ -41,4 +41,11 @@ public interface ILSMIndexOperationContext extends IIndexOperationContext {
     public ISearchPredicate getSearchPredicate();
 
     public List<ILSMComponent> getComponentsToBeReplicated();
+
+    /**
+     * @return true if this operation entered the components. Otherwise false.
+     */
+    public boolean isAccessingComponents();
+
+    public void setAccessingComponents(boolean accessingComponents);
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
new file mode 100644
index 0000000..3b907c3
--- /dev/null
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexOperationContext.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hyracks.storage.am.lsm.common.impls;
+
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+
+public abstract class AbstractLSMIndexOperationContext implements ILSMIndexOperationContext {
+
+    private boolean accessingComponents = false;
+
+    @Override
+    public boolean isAccessingComponents() {
+        return accessingComponents;
+    }
+
+    @Override
+    public void setAccessingComponents(boolean accessingComponents) {
+        this.accessingComponents = accessingComponents;
+    }
+
+    @Override
+    public void reset() {
+        accessingComponents = false;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
index 8e39223..e4be66b 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/ExternalIndexHarness.java
@@ -42,14 +42,15 @@ import org.apache.hyracks.storage.am.lsm.common.api.LSMOperationType;
 public class ExternalIndexHarness extends LSMHarness {
     private static final Logger LOGGER = Logger.getLogger(ExternalIndexHarness.class.getName());
 
-    public ExternalIndexHarness(ILSMIndexInternal lsmIndex, ILSMMergePolicy mergePolicy,
-            ILSMOperationTracker opTracker, boolean replicationEnabled) {
+    public ExternalIndexHarness(ILSMIndexInternal lsmIndex, ILSMMergePolicy mergePolicy, ILSMOperationTracker opTracker,
+            boolean replicationEnabled) {
         super(lsmIndex, mergePolicy, opTracker, replicationEnabled);
     }
 
     @Override
     protected boolean getAndEnterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType,
             boolean isTryOperation) throws HyracksDataException {
+        validateOperationEnterComponentsState(ctx);
         synchronized (opTracker) {
             while (true) {
                 lsmIndex.getOperationalComponents(ctx);
@@ -75,6 +76,7 @@ public class ExternalIndexHarness extends LSMHarness {
     @Override
     protected boolean enterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType)
             throws HyracksDataException {
+        validateOperationEnterComponentsState(ctx);
         List<ILSMComponent> components = ctx.getComponentHolder();
         int numEntered = 0;
         boolean entranceSuccessful = false;
@@ -97,6 +99,7 @@ public class ExternalIndexHarness extends LSMHarness {
                 }
                 return false;
             }
+            ctx.setAccessingComponents(true);
         }
         // Check if there is any action that is needed to be taken based on the operation type
         switch (opType) {
@@ -111,6 +114,13 @@ public class ExternalIndexHarness extends LSMHarness {
 
     private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMComponent newComponent,
             boolean failedOperation) throws HyracksDataException, IndexException {
+        /**
+         * FLUSH and MERGE operations should always exit the components
+         * to notify waiting threads.
+         */
+        if (!ctx.isAccessingComponents() && opType != LSMOperationType.FLUSH && opType != LSMOperationType.MERGE) {
+            return;
+        }
         synchronized (opTracker) {
             try {
                 // First check if there is any action that is needed to be taken based on the state of each component.
@@ -130,6 +140,7 @@ public class ExternalIndexHarness extends LSMHarness {
                             break;
                     }
                 }
+                ctx.setAccessingComponents(false);
                 // Then, perform any action that is needed to be taken based on the operation type.
                 switch (opType) {
                     case MERGE:
@@ -156,8 +167,8 @@ public class ExternalIndexHarness extends LSMHarness {
     }
 
     @Override
-    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple) throws HyracksDataException,
-            IndexException {
+    public void forceModify(ILSMIndexOperationContext ctx, ITupleReference tuple)
+            throws HyracksDataException, IndexException {
         throw new IndexException("2PC LSM Inedx doesn't support modify");
     }
 
@@ -216,8 +227,8 @@ public class ExternalIndexHarness extends LSMHarness {
     }
 
     @Override
-    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-            IndexException {
+    public void merge(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
+            throws HyracksDataException, IndexException {
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Started a merge operation for index: " + lsmIndex + " ...");
         }
@@ -293,8 +304,8 @@ public class ExternalIndexHarness extends LSMHarness {
     }
 
     @Override
-    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
-            IndexException {
+    public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation)
+            throws HyracksDataException, IndexException {
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 0224c5c..a19532f 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -72,6 +72,7 @@ public class LSMHarness implements ILSMHarness {
 
     protected boolean getAndEnterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType,
             boolean isTryOperation) throws HyracksDataException {
+        validateOperationEnterComponentsState(ctx);
         synchronized (opTracker) {
             while (true) {
                 lsmIndex.getOperationalComponents(ctx);
@@ -133,6 +134,7 @@ public class LSMHarness implements ILSMHarness {
 
     protected boolean enterComponents(ILSMIndexOperationContext ctx, LSMOperationType opType)
             throws HyracksDataException {
+        validateOperationEnterComponentsState(ctx);
         List<ILSMComponent> components = ctx.getComponentHolder();
         int numEntered = 0;
         boolean entranceSuccessful = false;
@@ -162,6 +164,7 @@ public class LSMHarness implements ILSMHarness {
                 }
                 return false;
             }
+            ctx.setAccessingComponents(true);
         }
         // Check if there is any action that is needed to be taken based on the operation type
         switch (opType) {
@@ -185,6 +188,13 @@ public class LSMHarness implements ILSMHarness {
 
     private void exitComponents(ILSMIndexOperationContext ctx, LSMOperationType opType, ILSMComponent newComponent,
             boolean failedOperation) throws HyracksDataException, IndexException {
+        /**
+         * FLUSH and MERGE operations should always exit the components
+         * to notify waiting threads.
+         */
+        if (!ctx.isAccessingComponents() && opType != LSMOperationType.FLUSH && opType != LSMOperationType.MERGE) {
+            return;
+        }
         List<ILSMComponent> inactiveDiskComponents = null;
         List<ILSMComponent> inactiveDiskComponentsToBeDeleted = null;
         try {
@@ -225,6 +235,7 @@ public class LSMHarness implements ILSMHarness {
                         }
                         i++;
                     }
+                    ctx.setAccessingComponents(false);
                     // Then, perform any action that is needed to be taken based on the operation type.
                     switch (opType) {
                         case FLUSH:
@@ -506,4 +517,10 @@ public class LSMHarness implements ILSMHarness {
             throw new HyracksDataException(e);
         }
     }
+
+    protected void validateOperationEnterComponentsState(ILSMIndexOperationContext ctx) throws HyracksDataException {
+        if (ctx.isAccessingComponents()) {
+            throw new HyracksDataException("Opeartion already has access to components of index " + lsmIndex);
+        }
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
index c4d2fcc..befdd85 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMIndexSearchCursor.java
@@ -141,7 +141,9 @@ public abstract class LSMIndexSearchCursor implements ITreeIndexCursor {
     @Override
     public void close() throws HyracksDataException {
         try {
-            outputPriorityQueue.clear();
+            if (outputPriorityQueue != null) {
+                outputPriorityQueue.clear();
+            }
             for (int i = 0; i < rangeCursors.length; i++) {
                 rangeCursors[i].close();
             }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
index c511a67..828e296 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexOpContext.java
@@ -32,10 +32,10 @@ import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 import org.apache.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexAccessor;
 
-public class LSMInvertedIndexOpContext implements ILSMIndexOperationContext {
+public class LSMInvertedIndexOpContext extends AbstractLSMIndexOperationContext {
 
     private static final int NUM_DOCUMENT_FIELDS = 1;
 
@@ -109,6 +109,7 @@ public class LSMInvertedIndexOpContext implements ILSMIndexOperationContext {
 
     @Override
     public void reset() {
+        super.reset();
         componentHolder.clear();
         componentsToBeMerged.clear();
         componentsToBeReplicated.clear();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
index 6a9a640..358a42a 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/ExternalRTreeOpContext.java
@@ -30,9 +30,9 @@ import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 
-public class ExternalRTreeOpContext implements ILSMIndexOperationContext {
+public class ExternalRTreeOpContext extends AbstractLSMIndexOperationContext {
     private IndexOperation op;
     private MultiComparator bTreeCmp;
     private MultiComparator rTreeCmp;
@@ -74,6 +74,7 @@ public class ExternalRTreeOpContext implements ILSMIndexOperationContext {
 
     @Override
     public void reset() {
+        super.reset();
         componentHolder.clear();
         componentsToBeMerged.clear();
         componentsToBeReplicated.clear();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c820f2cd/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
index 686cd2b..62f572f 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
@@ -35,11 +35,11 @@ import org.apache.hyracks.storage.am.common.ophelpers.MultiComparator;
 import org.apache.hyracks.storage.am.common.tuples.PermutingTupleReference;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMComponent;
 import org.apache.hyracks.storage.am.lsm.common.api.ILSMHarness;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMIndexOperationContext;
+import org.apache.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexOperationContext;
 import org.apache.hyracks.storage.am.rtree.impls.RTree;
 import org.apache.hyracks.storage.am.rtree.impls.RTreeOpContext;
 
-public final class LSMRTreeOpContext implements ILSMIndexOperationContext {
+public final class LSMRTreeOpContext extends AbstractLSMIndexOperationContext {
 
     public RTree.RTreeAccessor[] mutableRTreeAccessors;
     public RTree.RTreeAccessor currentMutableRTreeAccessor;
@@ -131,6 +131,7 @@ public final class LSMRTreeOpContext implements ILSMIndexOperationContext {
 
     @Override
     public void reset() {
+        super.reset();
         componentHolder.clear();
         componentsToBeMerged.clear();
     }


[04/50] [abbrv] incubator-asterixdb git commit: Enable Remove Quietly for Remove Directory Jobs

Posted by im...@apache.org.
Enable Remove Quietly for Remove Directory Jobs

Change-Id: I00a60bf4ac6d64295224c2cebc28fb22f92e77fd
Reviewed-on: https://asterix-gerrit.ics.uci.edu/775
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <hu...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/06610363
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/06610363
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/06610363

Branch: refs/heads/master
Commit: 0661036306c80ee45ed3f60f91ea38af46a64cff
Parents: 08aa051
Author: Abdullah Alamoudi <ba...@gmail.com>
Authored: Mon Apr 4 09:45:29 2016 +0300
Committer: abdullah alamoudi <ba...@gmail.com>
Committed: Mon Apr 4 01:30:50 2016 -0700

----------------------------------------------------------------------
 .../std/file/FileRemoveOperatorDescriptor.java     | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/06610363/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
index cf2008c..43b1b34 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
@@ -37,10 +37,13 @@ import org.apache.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescri
 public class FileRemoveOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
 
     private final IFileSplitProvider fileSplitProvider;
+    private final boolean quietly;
 
-    public FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder) {
+    public FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvder,
+            boolean quietly) {
         super(spec, 0, 0);
         this.fileSplitProvider = fileSplitProvder;
+        this.quietly = quietly;
     }
 
     private static final long serialVersionUID = 1L;
@@ -62,10 +65,14 @@ public class FileRemoveOperatorDescriptor extends AbstractSingleActivityOperator
             @Override
             public void initialize() throws HyracksDataException {
                 File f = ioManager.getAbsoluteFileRef(deviceId, path).getFile();
-                try {
-                    FileUtils.deleteDirectory(f);
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
+                if (quietly) {
+                    FileUtils.deleteQuietly(f);
+                } else {
+                    try {
+                        FileUtils.deleteDirectory(f);
+                    } catch (IOException e) {
+                        throw new HyracksDataException(e);
+                    }
                 }
             }
 


[19/50] [abbrv] incubator-asterixdb git commit: Merge branch 'master' into hyracks-merge2

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/8516517e/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
----------------------------------------------------------------------
diff --cc asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index c343570,0000000..f9d1a0e
mode 100644,000000..100644
--- a/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterixdb/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@@ -1,7067 -1,0 +1,7112 @@@
 +<!--
 + ! Licensed to the Apache Software Foundation (ASF) under one
 + ! or more contributor license agreements.  See the NOTICE file
 + ! distributed with this work for additional information
 + ! regarding copyright ownership.  The ASF licenses this file
 + ! to you under the Apache License, Version 2.0 (the
 + ! "License"); you may not use this file except in compliance
 + ! with the License.  You may obtain a copy of the License at
 + !
 + !   http://www.apache.org/licenses/LICENSE-2.0
 + !
 + ! Unless required by applicable law or agreed to in writing,
 + ! software distributed under the License is distributed on an
 + ! "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + ! KIND, either express or implied.  See the License for the
 + ! specific language governing permissions and limitations
 + ! under the License.
 + !-->
 +<!DOCTYPE test-suite [
 +
 +        <!ENTITY RecordsQueries SYSTEM "queries_sqlpp/records/RecordsQueries.xml">
 +
 +        ]>
 +<test-suite
 +        xmlns="urn:xml.testframework.asterix.apache.org"
 +        ResultOffsetPath="results"
 +        QueryOffsetPath="queries_sqlpp"
 +        QueryFileExtension=".sqlpp">
 +    <test-group name="flwor">
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at00">
 +                <output-dir compare="Text">at00</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at01">
 +                <output-dir compare="Text">at01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at02">
 +                <output-dir compare="Text">at02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at03">
 +                <output-dir compare="Text">at03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at04">
 +                <output-dir compare="Text">at04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at05">
 +                <output-dir compare="Text">at05</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="at06">
 +                <output-dir compare="Text">at06</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="flwor">
 +            <compilation-unit name="query-issue550">
 +                <output-dir compare="Text">query-issue550</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <!--
 +    <test-group name="union">
 +        <test-case FilePath="union">
 +            <compilation-unit name="union">
 +                <output-dir compare="Text">union</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    -->
 +    <test-case FilePath="flwor">
 +        <compilation-unit name="let33">
 +            <output-dir compare="Text">let33</output-dir>
 +        </compilation-unit>
 +    </test-case>
 +    <test-group name="aggregate">
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue531_string_min_max">
 +                <output-dir compare="Text">issue531_string_min_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_null">
 +                <output-dir compare="Text">agg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_null_rec">
 +                <output-dir compare="Text">agg_null_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_null_rec_1">
 +                <output-dir compare="Text">agg_null_rec_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_number_rec">
 +                <output-dir compare="Text">agg_number_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_mixed">
 +                <output-dir compare="Text">avg_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_mixed">
 +                <output-dir compare="Text">sum_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="min_mixed">
 +                <output-dir compare="Text">min_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="agg_number">
 +                <output-dir compare="Text">agg_number</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_min_hetero_list_1">
 +                <output-dir compare="Text">issue425_min_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_min_hetero_list">
 +                <output-dir compare="Text">issue425_min_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_sum_hetero_list_1">
 +                <output-dir compare="Text">issue425_sum_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue425_sum_hetero_list">
 +                <output-dir compare="Text">issue425_sum_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="query-issue400">
 +                <output-dir compare="Text">query-issue400</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue395">
 +                <output-dir compare="Text">issue395</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue412_0">
 +                <output-dir compare="Text">issue412_0</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="issue412_1">
 +                <output-dir compare="Text">issue412_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_double">
 +                <output-dir compare="Text">avg_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_double_null">
 +                <output-dir compare="Text">avg_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_empty_01">
 +                <output-dir compare="Text">avg_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_empty_02">
 +                <output-dir compare="Text">avg_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_float">
 +                <output-dir compare="Text">avg_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_float_null">
 +                <output-dir compare="Text">avg_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int16">
 +                <output-dir compare="Text">avg_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int16_null">
 +                <output-dir compare="Text">avg_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int32">
 +                <output-dir compare="Text">avg_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int32_null">
 +                <output-dir compare="Text">avg_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int64">
 +                <output-dir compare="Text">avg_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int64_null">
 +                <output-dir compare="Text">avg_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int8">
 +                <output-dir compare="Text">avg_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="avg_int8_null">
 +                <output-dir compare="Text">avg_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_01">
 +                <output-dir compare="Text">count_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_empty_01">
 +                <output-dir compare="Text">count_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_empty_02">
 +                <output-dir compare="Text">count_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="count_null">
 +                <output-dir compare="Text">count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="droptype">
 +            <output-dir compare="Text">droptype</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <!-- TODO(madhusudancs): These tests exercise the local_<agg>/global_<agg> functions and should eventually be
 +        removed. Before that, the code should be modified to ensure those built-in functions remain defined but are
 +        not exposed through AQL; until then, these test cases are left commented out.
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="global-avg_01">
 +            <output-dir compare="Text">global-avg_01</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="global-avg_null">
 +            <output-dir compare="Text">global-avg_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_double">
 +            <output-dir compare="Text">local-avg_double</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_double_null">
 +            <output-dir compare="Text">local-avg_double_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_float">
 +            <output-dir compare="Text">local-avg_float</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_float_null">
 +            <output-dir compare="Text">local-avg_float_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int16">
 +            <output-dir compare="Text">local-avg_int16</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int16_null">
 +            <output-dir compare="Text">local-avg_int16_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int32">
 +            <output-dir compare="Text">local-avg_int32</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int32_null">
 +            <output-dir compare="Text">local-avg_int32_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int64">
 +            <output-dir compare="Text">local-avg_int64</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int64_null">
 +            <output-dir compare="Text">local-avg_int64_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int8">
 +            <output-dir compare="Text">local-avg_int8</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +          <compilation-unit name="local-avg_int8_null">
 +            <output-dir compare="Text">local-avg_int8_null</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="max_empty_01">
 +                <output-dir compare="Text">max_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="max_empty_02">
 +                <output-dir compare="Text">max_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="min_empty_01">
 +                <output-dir compare="Text">min_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="min_empty_02">
 +                <output-dir compare="Text">min_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_avg">
 +                <output-dir compare="Text">scalar_avg</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_avg_empty">
 +                <output-dir compare="Text">scalar_avg_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_avg_null">
 +                <output-dir compare="Text">scalar_avg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_count">
 +                <output-dir compare="Text">scalar_count</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_count_empty">
 +                <output-dir compare="Text">scalar_count_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_count_null">
 +                <output-dir compare="Text">scalar_count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_max">
 +                <output-dir compare="Text">scalar_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_max_empty">
 +                <output-dir compare="Text">scalar_max_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_max_null">
 +                <output-dir compare="Text">scalar_max_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_min">
 +                <output-dir compare="Text">scalar_min</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_min_empty">
 +                <output-dir compare="Text">scalar_min_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_min_null">
 +                <output-dir compare="Text">scalar_min_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_sum">
 +                <output-dir compare="Text">scalar_sum</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_sum_empty">
 +                <output-dir compare="Text">scalar_sum_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="scalar_sum_null">
 +                <output-dir compare="Text">scalar_sum_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_double">
 +                <output-dir compare="Text">sum_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_double_null">
 +                <output-dir compare="Text">sum_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_empty_01">
 +                <output-dir compare="Text">sum_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_empty_02">
 +                <output-dir compare="Text">sum_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_float">
 +                <output-dir compare="Text">sum_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_float_null">
 +                <output-dir compare="Text">sum_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int16">
 +                <output-dir compare="Text">sum_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int16_null">
 +                <output-dir compare="Text">sum_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int32">
 +                <output-dir compare="Text">sum_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int32_null">
 +                <output-dir compare="Text">sum_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int64">
 +                <output-dir compare="Text">sum_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int64_null">
 +                <output-dir compare="Text">sum_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int8">
 +                <output-dir compare="Text">sum_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_int8_null">
 +                <output-dir compare="Text">sum_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_null-with-pred">
 +                <output-dir compare="Text">sum_null-with-pred</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate">
 +            <compilation-unit name="sum_numeric_null">
 +                <output-dir compare="Text">sum_numeric_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="aggregate-sql">
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue531_string_min_max">
 +                <output-dir compare="Text">issue531_string_min_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_null">
 +                <output-dir compare="Text">agg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_null_rec">
 +                <output-dir compare="Text">agg_null_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_null_rec_1">
 +                <output-dir compare="Text">agg_null_rec_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_number_rec">
 +                <output-dir compare="Text">agg_number_rec</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_mixed">
 +                <output-dir compare="Text">avg_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_mixed">
 +                <output-dir compare="Text">sum_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="min_mixed">
 +                <output-dir compare="Text">min_mixed</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Unexpected type STRING</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="agg_number">
 +                <output-dir compare="Text">agg_number</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_min_hetero_list_1">
 +                <output-dir compare="Text">issue425_min_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_min_hetero_list">
 +                <output-dir compare="Text">issue425_min_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_sum_hetero_list_1">
 +                <output-dir compare="Text">issue425_sum_hetero_list_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue425_sum_hetero_list">
 +                <output-dir compare="Text">issue425_sum_hetero_list</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="query-issue400">
 +                <output-dir compare="Text">query-issue400</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue395">
 +                <output-dir compare="Text">issue395</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue412_0">
 +                <output-dir compare="Text">issue412_0</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="issue412_1">
 +                <output-dir compare="Text">issue412_1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_double">
 +                <output-dir compare="Text">avg_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_double_null">
 +                <output-dir compare="Text">avg_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_empty_01">
 +                <output-dir compare="Text">avg_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_empty_02">
 +                <output-dir compare="Text">avg_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_float">
 +                <output-dir compare="Text">avg_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_float_null">
 +                <output-dir compare="Text">avg_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int16">
 +                <output-dir compare="Text">avg_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int16_null">
 +                <output-dir compare="Text">avg_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int32">
 +                <output-dir compare="Text">avg_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int32_null">
 +                <output-dir compare="Text">avg_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int64">
 +                <output-dir compare="Text">avg_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int64_null">
 +                <output-dir compare="Text">avg_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int8">
 +                <output-dir compare="Text">avg_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="avg_int8_null">
 +                <output-dir compare="Text">avg_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_01">
 +                <output-dir compare="Text">count_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_empty_01">
 +                <output-dir compare="Text">count_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_empty_02">
 +                <output-dir compare="Text">count_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="count_null">
 +                <output-dir compare="Text">count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="max_empty_01">
 +                <output-dir compare="Text">max_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="max_empty_02">
 +                <output-dir compare="Text">max_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="min_empty_01">
 +                <output-dir compare="Text">min_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="min_empty_02">
 +                <output-dir compare="Text">min_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_avg">
 +                <output-dir compare="Text">scalar_avg</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_avg_empty">
 +                <output-dir compare="Text">scalar_avg_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_avg_null">
 +                <output-dir compare="Text">scalar_avg_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_count">
 +                <output-dir compare="Text">scalar_count</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_count_empty">
 +                <output-dir compare="Text">scalar_count_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_count_null">
 +                <output-dir compare="Text">scalar_count_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_max">
 +                <output-dir compare="Text">scalar_max</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_max_empty">
 +                <output-dir compare="Text">scalar_max_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_max_null">
 +                <output-dir compare="Text">scalar_max_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_min">
 +                <output-dir compare="Text">scalar_min</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_min_empty">
 +                <output-dir compare="Text">scalar_min_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_min_null">
 +                <output-dir compare="Text">scalar_min_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_sum">
 +                <output-dir compare="Text">scalar_sum</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_sum_empty">
 +                <output-dir compare="Text">scalar_sum_empty</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="scalar_sum_null">
 +                <output-dir compare="Text">scalar_sum_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_double">
 +                <output-dir compare="Text">sum_double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_double_null">
 +                <output-dir compare="Text">sum_double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_empty_01">
 +                <output-dir compare="Text">sum_empty_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_empty_02">
 +                <output-dir compare="Text">sum_empty_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_float">
 +                <output-dir compare="Text">sum_float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_float_null">
 +                <output-dir compare="Text">sum_float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int16">
 +                <output-dir compare="Text">sum_int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int16_null">
 +                <output-dir compare="Text">sum_int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int32">
 +                <output-dir compare="Text">sum_int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int32_null">
 +                <output-dir compare="Text">sum_int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int64">
 +                <output-dir compare="Text">sum_int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int64_null">
 +                <output-dir compare="Text">sum_int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int8">
 +                <output-dir compare="Text">sum_int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_int8_null">
 +                <output-dir compare="Text">sum_int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_null-with-pred">
 +                <output-dir compare="Text">sum_null-with-pred</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="aggregate-sql">
 +            <compilation-unit name="sum_numeric_null">
 +                <output-dir compare="Text">sum_numeric_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="boolean">
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="and_01">
 +                <output-dir compare="Text">and_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="and_null">
 +                <output-dir compare="Text">and_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="and_null_false">
 +                <output-dir compare="Text">and_null_false</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="boolean">
 +            <compilation-unit name="not_01">
 +                <output-dir compare="Text">not_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="comparison">
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="year_month_duration_order">
 +                <output-dir compare="Text">year_month_duration_order</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="datetime_order">
 +                <output-dir compare="Text">datetime_order</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="datetime_range">
 +                <output-dir compare="Text">datetime_range</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="datetime_tzeq">
 +                <output-dir compare="Text">datetime_tzeq</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="double">
 +                <output-dir compare="Text">double</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="double_gte_01">
 +                <output-dir compare="Text">double_gte_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="double_null">
 +                <output-dir compare="Text">double_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="eq_01">
 +                <output-dir compare="Text">eq_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="float">
 +                <output-dir compare="Text">float</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="float_null">
 +                <output-dir compare="Text">float_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="gt_01">
 +                <output-dir compare="Text">gt_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="gte_01">
 +                <output-dir compare="Text">gte_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int16">
 +                <output-dir compare="Text">int16</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int16_null">
 +                <output-dir compare="Text">int16_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int32">
 +                <output-dir compare="Text">int32</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int32_null">
 +                <output-dir compare="Text">int32_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int64">
 +                <output-dir compare="Text">int64</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int64_null">
 +                <output-dir compare="Text">int64_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int8">
 +                <output-dir compare="Text">int8</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="int8_null">
 +                <output-dir compare="Text">int8_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="lt_01">
 +                <output-dir compare="Text">lt_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="lte_01">
 +                <output-dir compare="Text">lte_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="neq_01">
 +                <output-dir compare="Text">neq_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="comparison">
 +          <compilation-unit name="numeric-comparison_01">
 +            <output-dir compare="Text">numeric-comparison_01</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="string">
 +                <output-dir compare="Text">string</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="string_null">
 +                <output-dir compare="Text">string_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_equality">
 +                <output-dir compare="Text">issue363_equality</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_inequality_duration">
 +                <output-dir compare="Text">issue363_inequality_duration</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the DURATION type are not defined</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_inequality_interval">
 +                <output-dir compare="Text">issue363_inequality_interval</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the INTERVAL type are not defined</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_inequality_point">
 +                <output-dir compare="Text">issue363_inequality_point</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the POINT type are not defined</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_inequality_line">
 +                <output-dir compare="Text">issue363_inequality_line</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the LINE type are not defined</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_inequality_polygon">
 +                <output-dir compare="Text">issue363_inequality_polygon</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the POLYGON type are not defined</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_inequality_rectangle">
 +                <output-dir compare="Text">issue363_inequality_rectangle</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the RECTANGLE type are not defined</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="issue363_inequality_circle">
 +                <output-dir compare="Text">issue363_inequality_circle</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Comparison operations (GT, GE, LT, and LE) for the CIRCLE type are not defined</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="binary">
 +                <output-dir compare="Text">binary</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="comparison">
 +            <compilation-unit name="binary_null">
 +                <output-dir compare="Text">binary_null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="constructor">
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="binary_01">
 +                <output-dir compare="Text">binary_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="add-null">
 +                <output-dir compare="Text">add-null</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="boolean_01">
 +                <output-dir compare="Text">boolean_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="circle_01">
 +                <output-dir compare="Text">circle_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="date_01">
 +                <output-dir compare="Text">date_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="datetime_01">
 +                <output-dir compare="Text">datetime_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="double_01">
 +                <output-dir compare="Text">double_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="duration_01">
 +                <output-dir compare="Text">duration_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="duration_02">
 +                <output-dir compare="Text">duration_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="float_01">
 +                <output-dir compare="Text">float_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="int_01">
 +                <output-dir compare="Text">int_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="interval">
 +                <output-dir compare="Text">interval</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="line_01">
 +                <output-dir compare="Text">line_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="rectangle_01">
 +                <output-dir compare="Text">rectangle_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="point_01">
 +                <output-dir compare="Text">point_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="polygon_01">
 +                <output-dir compare="Text">polygon_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-01">
 +                <output-dir compare="Text">primitive-01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-02">
 +                <output-dir compare="Text">primitive-02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-03">
 +                <output-dir compare="Text">primitive-03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="primitive-04">
 +                <output-dir compare="Text">primitive-04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="string_01">
 +                <output-dir compare="Text">string_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="time_01">
 +                <output-dir compare="Text">time_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="constructor">
 +            <compilation-unit name="uuid_01">
 +                <output-dir compare="Text">uuid_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="custord">
 +        <!--
 +    <test-case FilePath="custord">
 +      <compilation-unit name="co">
 +        <output-dir compare="Text">co</output-dir>
 +      </compilation-unit>
 +    </test-case>
 +    -->
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_01">
 +                <output-dir compare="Text">customer_q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_02">
 +                <output-dir compare="Text">customer_q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_03">
 +                <output-dir compare="Text">customer_q_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_04">
 +                <output-dir compare="Text">customer_q_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_05">
 +                <output-dir compare="Text">customer_q_05</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_06">
 +                <output-dir compare="Text">customer_q_06</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_07">
 +                <output-dir compare="Text">customer_q_07</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="customer_q_08">
 +                <output-dir compare="Text">customer_q_08</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="custord">
 +          <compilation-unit name="denorm-cust-order_01">
 +            <output-dir compare="Text">denorm-cust-order_01</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="custord">
 +            <compilation-unit name="denorm-cust-order_02">
 +                <output-dir compare="Text">denorm-cust-order_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="custord">
 +          <compilation-unit name="denorm-cust-order_03">
 +            <output-dir compare="Text">denorm-cust-order_03</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <!--
 +        <test-case FilePath="custord">
 +          <compilation-unit name="freq-clerk">
 +            <output-dir compare="Text">freq-clerk</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_01">
 +                <output-dir compare="Text">join_q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_02">
 +                <output-dir compare="Text">join_q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_03">
 +                <output-dir compare="Text">join_q_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_04">
 +                <output-dir compare="Text">join_q_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_05">
 +                <output-dir compare="Text">join_q_01</output-dir><!-- NOTE(review): output-dir "join_q_01" does not match compilation-unit "join_q_05"; confirm this intentionally reuses join_q_01's expected output, otherwise change to join_q_05 -->
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_06">
 +                <output-dir compare="Text">join_q_06</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="join_q_07">
 +                <output-dir compare="Text">join_q_06</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset c in dataverse test</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="load-test">
 +                <output-dir compare="Text">load-test</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_01">
 +                <output-dir compare="Text">order_q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_02">
 +                <output-dir compare="Text">order_q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_03">
 +                <output-dir compare="Text">order_q_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_04">
 +                <output-dir compare="Text">order_q_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_05">
 +                <output-dir compare="Text">order_q_05</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="custord">
 +            <compilation-unit name="order_q_06">
 +                <output-dir compare="Text">order_q_06</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="dapd">
 +        <test-case FilePath="dapd">
 +            <compilation-unit name="q1">
 +                <output-dir compare="Text">q1</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dapd">
 +            <compilation-unit name="q2">
 +                <output-dir compare="Text">q2</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="dapd">
 +          <compilation-unit name="q3">
 +            <output-dir compare="Text">q3</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +    </test-group>
 +    <test-group name="dml">
 +        <test-case FilePath="dml">
 +            <compilation-unit name="compact-dataset-and-its-indexes">
 +                <output-dir compare="Text">compact-dataset-and-its-indexes</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-constant-merge-policy">
 +                <output-dir compare="Text">using-constant-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-prefix-merge-policy">
 +                <output-dir compare="Text">using-prefix-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-correlated-prefix-merge-policy">
 +                <output-dir compare="Text">using-correlated-prefix-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="using-no-merge-policy">
 +                <output-dir compare="Text">using-no-merge-policy</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue382">
 +                <output-dir compare="Text">query-issue382</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue433">
 +                <output-dir compare="Text">query-issue433</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue288">
 +                <output-dir compare="Text">query-issue288</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="query-issue205">
 +                <output-dir compare="Text">query-issue205</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-from-loaded-dataset-with-index">
 +                <output-dir compare="Text">delete-from-loaded-dataset-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-from-loaded-dataset">
 +                <output-dir compare="Text">delete-from-loaded-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-syntax-change">
 +                <output-dir compare="Text">delete-syntax-change</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="drop-empty-secondary-indexes">
 +                <output-dir compare="Text">drop-empty-secondary-indexes</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="drop-index">
 +                <output-dir compare="Text">drop-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="create-drop-cltype">
 +                <output-dir compare="Text">create-drop-cltype</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="create-drop-opntype">
 +                <output-dir compare="Text">create-drop-opntype</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="empty-load-with-index">
 +                <output-dir compare="Text">empty-load-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-empty-dataset">
 +                <output-dir compare="Text">insert-into-empty-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-empty-dataset-with-index">
 +                <output-dir compare="Text">insert-into-empty-dataset-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-syntax">
 +                <output-dir compare="Text">insert-syntax</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-and-scan-dataset">
 +                <output-dir compare="Text">insert-and-scan-dataset</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-and-scan-dataset-with-index">
 +                <output-dir compare="Text">insert-and-scan-dataset-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-and-scan-joined-datasets">
 +                <output-dir compare="Text">insert-and-scan-joined-datasets</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset-with-index_01">
 +                <output-dir compare="Text">insert-into-loaded-dataset-with-index_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset-with-index_02">
 +                <output-dir compare="Text">insert-into-loaded-dataset-with-index_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset_01">
 +                <output-dir compare="Text">insert-into-loaded-dataset_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-into-loaded-dataset_02">
 +                <output-dir compare="Text">insert-into-loaded-dataset_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-src-dst-01">
 +                <output-dir compare="Text">insert-src-dst-01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert">
 +                <output-dir compare="Text">insert</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-duplicated-keys">
 +                <output-dir compare="Text">insert-duplicated-keys</output-dir>
 +                <expected-error>org.apache.hyracks.storage.am.common.exceptions.TreeIndexDuplicateKeyException: Failed to insert key since key already exists</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert_less_nc">
 +                <output-dir compare="Text">insert_less_nc</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <!--
 +        <test-case FilePath="dml">
 +          <compilation-unit name="load-from-hdfs">
 +            <output-dir compare="Text">load-from-hdfs</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-with-autogenerated-pk_adm_01">
 +                <output-dir compare="Text">insert-with-autogenerated-pk_adm_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-with-autogenerated-pk_adm_02">
 +                <output-dir compare="Text">insert-with-autogenerated-pk_adm_02</output-dir>
 +                <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Duplicate field id encountered</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="insert-with-autogenerated-pk_adm_03">
 +                <output-dir compare="Text">insert-with-autogenerated-pk_adm_03</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_txt_01">
 +                <output-dir compare="Text">load-with-autogenerated-pk_txt_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_01">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_02">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_02</output-dir>
 +                <expected-error>org.apache.asterix.external.parser.ADMDataParser$ParseException</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_03">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_03</output-dir>
 +                <expected-error>org.apache.asterix.external.parser.ADMDataParser$ParseException</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_adm_04">
 +                <output-dir compare="Text">load-with-autogenerated-pk_adm_04</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_csv_01">
 +                <output-dir compare="Text">load-with-autogenerated-pk_csv_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-autogenerated-pk_csv_02">
 +                <output-dir compare="Text">load-with-autogenerated-pk_csv_02</output-dir>
 +                <expected-error>java.lang.Exception: Result</expected-error>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-index">
 +                <output-dir compare="Text">load-with-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-ngram-index">
 +                <output-dir compare="Text">load-with-ngram-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-rtree-index">
 +                <output-dir compare="Text">load-with-rtree-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-word-index">
 +                <output-dir compare="Text">load-with-word-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-c2o-recursive">
 +                <output-dir compare="Text">opentype-c2o-recursive</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-c2o">
 +                <output-dir compare="Text">opentype-c2o</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-closed-optional">
 +                <output-dir compare="Text">opentype-closed-optional</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-insert">
 +                <output-dir compare="Text">opentype-insert</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-insert2">
 +                <output-dir compare="Text">opentype-insert2</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-noexpand">
 +                <output-dir compare="Text">opentype-noexpand</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-o2c-recursive">
 +                <output-dir compare="Text">opentype-o2c-recursive</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-o2c">
 +                <output-dir compare="Text">opentype-o2c</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="opentype-o2o">
 +                <output-dir compare="Text">opentype-o2o</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-btree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-btree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-rtree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-rtree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-rtree-secondary-index">
 +                <output-dir compare="Text">scan-delete-rtree-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-btree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-btree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-rtree-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-rtree-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-rtree-secondary-index">
 +                <output-dir compare="Text">scan-insert-rtree-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-ngram-secondary-index">
 +                <output-dir compare="Text">scan-insert-inverted-index-ngram-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-word-secondary-index">
 +                <output-dir compare="Text">scan-insert-inverted-index-word-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-ngram-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-inverted-index-ngram-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-word-secondary-index-nullable">
 +                <output-dir compare="Text">scan-insert-inverted-index-word-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-ngram-secondary-index">
 +                <output-dir compare="Text">scan-delete-inverted-index-ngram-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-word-secondary-index">
 +                <output-dir compare="Text">scan-delete-inverted-index-word-secondary-index</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-ngram-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-inverted-index-ngram-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-word-secondary-index-nullable">
 +                <output-dir compare="Text">scan-delete-inverted-index-word-secondary-index-nullable</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-index-open">
 +                <output-dir compare="Text">load-with-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-ngram-index-open">
 +                <output-dir compare="Text">load-with-ngram-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-rtree-index-open">
 +                <output-dir compare="Text">load-with-rtree-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="load-with-word-index-open">
 +                <output-dir compare="Text">load-with-word-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-btree-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-btree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-ngram-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-inverted-index-ngram-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-inverted-index-word-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-inverted-index-word-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-delete-rtree-secondary-index-open">
 +                <output-dir compare="Text">scan-delete-rtree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-btree-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-btree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-ngram-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-inverted-index-ngram-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-inverted-index-word-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-inverted-index-word-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="scan-insert-rtree-secondary-index-open">
 +                <output-dir compare="Text">scan-insert-rtree-secondary-index-open</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="dml">
 +            <compilation-unit name="delete-multi-statement">
 +                <output-dir compare="Text">delete-multi-statement</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="employee">
 +        <test-case FilePath="employee">
 +            <compilation-unit name="q_01">
 +                <output-dir compare="Text">q_01</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +        <test-case FilePath="employee">
 +            <compilation-unit name="q_02">
 +                <output-dir compare="Text">q_02</output-dir>
 +            </compilation-unit>
 +        </test-case>
 +    </test-group>
 +    <test-group name="failure">
 +        <!--
 +        <test-case FilePath="failure">
 +          <compilation-unit name="q1_pricing_summary_report_failure">
 +            <output-dir compare="Text">q1_pricing_summary_report_failure</output-dir>
 +          </compilation-unit>
 +        </test-case>
 +        -->
 +    </test-group>
 +    <!--
 +    <test-group name="flwor">
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for01">
 +          <output-dir compare="Text">for01</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for02">
 +          <output-dir compare="Text">for02</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for03">
 +          <output-dir compare="Text">for03</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for04">
 +          <output-dir compare="Text">for04</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for05">
 +          <output-dir compare="Text">for05</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for06">
 +          <output-dir compare="Text">for06</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for07">
 +          <output-dir compare="Text">for07</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for08">
 +          <output-dir compare="Text">for08</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for09">
 +          <output-dir compare="Text">for09</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for10">
 +          <output-dir compare="Text">for10</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for11">
 +          <output-dir compare="Text">for11</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for12">
 +          <output-dir compare="Text">for12</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for13">
 +          <output-dir compare="Text">for13</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for14">
 +          <output-dir compare="Text">for14</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for15">
 +          <output-dir compare="Text">for15</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for16">
 +          <output-dir compare="Text">for16</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for17">
 +          <output-dir compare="Text">for17</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for18">
 +          <output-dir compare="Text">for18</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="for19">
 +          <output-dir compare="Text">for19</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="grpby01">
 +          <output-dir compare="Text">grpby01</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="grpby02">
 +          <output-dir compare="Text">grpby02</output-dir>
 +        </compilation-unit>
 +      </test-case>
 +      <test-case FilePath="flwor">
 +        <compilation-unit name="let01">
 +          <output-dir compare="Text

<TRUNCATED>


[44/50] [abbrv] incubator-asterixdb git commit: Move merged files

Posted by im...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm
deleted file mode 100644
index 56abc61..0000000
--- a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser-old/classad-parser-old.1.adm
+++ /dev/null
@@ -1,5 +0,0 @@
-{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1206367.0#1439615565", "CRAB_UserGroup": "undefined", "JobStartDate": 1439615574, "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ferencek/CN=650164/CN=Dinko Ferencek", "JobStatus": 4, "CRAB_TFileOutputFiles": "{  }", "LeaveJobInQueue": "JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )", "DAG_InRecovery": 0, "AutoClusterId": 10378, "CRAB_TaskWorker": "vocms052", "OnExitRemove": "( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )", "MaxWallTimeMins_RAW": 1315, "JobCurrentStartDate": 1439615574, "CRAB_ASOTimeout": 86400, "CoreSize": -1, "CRAB_AsyncDest": "T3_US_FNALLPC", "StageInFinish": 1439615572, "ExitStatus": 0, "ReleaseReason": "Data files spooled", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "x509userproxyexpiration": 1440294044, "CurrentTime": "time()", "X509UserProxy": "3a7798796bc24a800001338
 917ec45991bcf0a96", "WantCheckpoint": false, "RemoteWallClockTime": 158333.0d, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "DiskUsage_RAW": 1, "DAG_Status": 0, "SUBMIT_x509userproxy": "/data/certs/creds/3a7798796bc24a800001338917ec45991bcf0a96", "EnteredCurrentStatus": 1439773907, "CRAB_RestURInoAPI": "/crabserver/prod", "HoldKillSig": "SIGUSR1", "RequestDisk": "DiskUsage", "MyType": "Job", "PeriodicRemove": "( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )", "RemoveKillSig": "SIGUSR1", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "dag_bootstrap_startup.sh", "CondorVersion": "$CondorVersion: 8.3.1 Jun 19 2015 $", "DAG_NodesReady": 0, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "CRAB_Workflow": "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "CRAB_UserRole": "undefined", "RemoteUserCpu
 ": 0.0d, "NiceUser": false, "CRAB_AlgoArgs": "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"lheInputFiles\": true, \"splitOnRun\": false, \"events_per_job\": 50000}, \"halt_job_on_file_boundaries\": false}", "Out": "_condor_stdout", "ImageSize_RAW": 100, "DAG_NodesPostrun": 0, "CRAB_JobArch": "slc6_amd64_gcc481", "CumulativeSuspensionTime": 0, "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_Requ
 estCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage", "LastHoldReasonCode": 16, "NumCkpts": 0, "CRAB_BlacklistT1": 0, "Err": "_condor_stderr", "JobFinishedHookDone": 1439773907, "RequestMemory_RAW": 2000, "TransferOutputRemaps": "undefined", "ProcId": 0, "ImageSize": 100, "JobUniverse": 7, "DAG_NodesTotal": 30, "CRAB_JobType": "analysis", "SUBMIT_Iwd": "/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB", "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "OnExitHold": "( ExitCode =!= undefined && ExitCode != 0 )", "OrigMaxHosts": 1, "RequestMemory": 2000, "NumJobStarts": 1, "CRAB_UserHN": "ferencek", "LastHoldReason": "Spooling input data files", "TotalSuspensions": 0, "CRAB_FailedNodeLimit": -1, "ExitCode": 0, "CRAB_PublishName": "LHE-17521057f93ed9cadf21dd45b3505145", "CRAB_UserWebDir": "http://sub
 mit-5.t2.ucsd.edu/CSstoragePath/67/uscms5050/150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE", "JobNotification": 0, "CRAB_DashboardTaskType": "analysis", "SUBMIT_TransferOutputRemaps": "_condor_stdout=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.out;_condor_stderr=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.err", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "LocalUserCpu": 0.0d, "BufferBlockSize": 32768, "LastJobStatus": 2, "CommittedTime": 0, "CRAB_SaveLogsFlag": 0, "LastSuspensionTime": 0, "TaskType": "ROOT", "DAG_NodesDone": 30, "CumulativeSlotTime": 158333.0d, "TransferOutput": "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001", "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "OtherJobRemoveRequirements": "DAGManJobId =?= ClusterId", "CondorPlatform": "$C
 ondorPlatform: X86_64-ScientificLinux_6.6 $", "PeriodicRelease": false, "JobRunCount": 1, "CRAB_Publish": 1, "JobPrio": 10, "CRAB_TransferOutputs": 1, "CRAB_Attempt": 0, "LocalSysCpu": 0.0d, "RemoteSysCpu": 0.0d, "TransferInput": "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz", "PeriodicHold": false, "CRAB_NumAutomJobRetries": 2, "CRAB_LumiMask": "{}", "CRAB_InputData": "/Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8", "WantRemoteIO": true, "CommittedSuspensionTime": 0, "CRAB_JobSW": "CMSSW_7_1_18", "StageInStart": 1439615569, "CRAB_SiteWhitelist": "{ \"T3_US_FNALLPC\",\"T2_US_Purdue\",\"T2_US_Nebraska\" }", "CompletionDate": 1439773907, "StreamErr": false, "CRAB_RestHost": "cmsweb.cern.ch", "RemoteCondorSetup": "", "CRAB_ReqName": "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13T
 eV-madgraph-pythia8_LHE", "DAG_NodesPrerun": 0, "WantRemoteSyscalls": false, "DAG_NodesQueued": 0, "DAG_NodesUnready": 0, "Owner": "uscms5050", "Requirements": "true || false && TARGET.OPSYS == \"LINUX\" && TARGET.ARCH == \"X86_64\" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory", "CRAB_JobCount": 30, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "CRAB_SplitAlgo": "EventBased", "DiskUsage": 1, "CRAB_MaxPost": 20, "ClusterId": 1206367, "BufferSize": 524288, "DAG_NodesFailed": 0, "MaxWallTimeMins": 1400, "CRAB_PublishGroupName": 0, "CommittedSlotTime": 0, "CRAB_SiteBlacklist": "{  }", "Args": "RunJobs.dag", "CRAB_EDMOutputFiles": "{ \"Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE.root\" }", "Environment": "strcat(\"PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=\",ClusterId,\".\",ProcId,\" CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local\")", "CRAB_UserVO": "cms", "Iwd": "/data/condor_local/spool/6367/0/cl
 uster1206367.proc0.subproc0", "QDate": 1439615565, "CurrentHosts": 0, "User": "uscms5050@cms", "StreamOut": false }
-{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1217455.0#1439764883", "CRAB_UserGroup": "dcms", "JobStartDate": 1439764892, "CRAB_UserDN": "/C=DE/O=GermanGrid/OU=KIT/CN=Andrew Gilbert", "JobStatus": 4, "CRAB_TFileOutputFiles": "{  }", "LeaveJobInQueue": "JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )", "DAG_InRecovery": 0, "AutoClusterId": 10378, "CRAB_TaskWorker": "vocms052", "OnExitRemove": "( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )", "MaxWallTimeMins_RAW": 1315, "JobCurrentStartDate": 1439764892, "CRAB_ASOTimeout": 86400, "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "StageInFinish": 1439764891, "ExitStatus": 0, "ReleaseReason": "Data files spooled", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "CurrentTime": "time()", "X509UserProxy": "63f0c4d862d8b4e4ddcfd29ed85b6b5899660759", "WantCheckpoint": false, "RemoteWallClockTime": 82427.0d, "In": 
 "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "DiskUsage_RAW": 1, "DAG_Status": 0, "SUBMIT_x509userproxy": "/data/certs/creds/63f0c4d862d8b4e4ddcfd29ed85b6b5899660759", "EnteredCurrentStatus": 1439847319, "CRAB_RestURInoAPI": "/crabserver/prod", "HoldKillSig": "SIGUSR1", "RequestDisk": "DiskUsage", "MyType": "Job", "PeriodicRemove": "( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )", "RemoveKillSig": "SIGUSR1", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "dag_bootstrap_startup.sh", "CondorVersion": "$CondorVersion: 8.3.1 Jun 19 2015 $", "DAG_NodesReady": 0, "CRAB_AdditionalOutputFiles": "{ \"combine_output.tar\" }", "ShouldTransferFiles": "YES", "CRAB_Workflow": "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "CRAB_UserRole": "undefined", "RemoteUserCpu": 0.0d, "NiceUser": false, "CRAB_AlgoArgs": "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_o
 n_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"splitOnRun\": false, \"events_per_job\": 1}, \"halt_job_on_file_boundaries\": false}", "Out": "_condor_stdout", "ImageSize_RAW": 100, "DAG_NodesPostrun": 0, "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,N
 iceUser,Rank,Requirements,DiskUsage", "LastHoldReasonCode": 16, "NumCkpts": 0, "CRAB_BlacklistT1": 0, "Err": "_condor_stderr", "JobFinishedHookDone": 1439847319, "RequestMemory_RAW": 2000, "TransferOutputRemaps": "undefined", "ProcId": 0, "ImageSize": 100, "JobUniverse": 7, "DAG_NodesTotal": 25, "CRAB_JobType": "analysis", "SUBMIT_Iwd": "/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9", "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "OnExitHold": "( ExitCode =!= undefined && ExitCode != 0 )", "OrigMaxHosts": 1, "RequestMemory": 2000, "NumJobStarts": 1, "CRAB_UserHN": "agilbert", "LastHoldReason": "Spooling input data files", "TotalSuspensions": 0, "CRAB_FailedNodeLimit": -1, "ExitCode": 0, "CRAB_PublishName": "prefit_cms_asimov_A1_5D-59ffde2b5d41be5f0c401d0a6a8a0194", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/73/uscms5616/150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "JobNotification": 0, "CRAB_DashboardTaskType": "anal
 ysis", "SUBMIT_TransferOutputRemaps": "_condor_stdout=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.out;_condor_stderr=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.err", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "LocalUserCpu": 0.0d, "BufferBlockSize": 32768, "LastJobStatus": 2, "CommittedTime": 0, "CRAB_SaveLogsFlag": 0, "LastSuspensionTime": 0, "TaskType": "ROOT", "DAG_NodesDone": 25, "CumulativeSlotTime": 82427.0d, "TransferOutput": "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001", "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "OtherJobRemoveRequirements": "DAGManJobId =?= ClusterId", "CondorPlatform": "$CondorPlatform: X86_64-ScientificLinux_6.6 $", "PeriodicRelease": false, "JobRunCount": 1, "CRAB_Publish": 0, "JobPrio": 10, "CRAB_TransferOutputs": 1, "CRAB_Attempt": 0, "LocalSysCpu": 0.0d, "RemoteSysCpu": 0.0d, "TransferInput": "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh,
  cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz", "PeriodicHold": false, "CRAB_NumAutomJobRetries": 2, "CRAB_LumiMask": "{}", "CRAB_InputData": "/MinBias", "WantRemoteIO": true, "CommittedSuspensionTime": 0, "CRAB_JobSW": "CMSSW_7_4_0_pre9", "StageInStart": 1439764886, "CRAB_SiteWhitelist": "{  }", "CompletionDate": 1439847319, "StreamErr": false, "CRAB_RestHost": "cmsweb.cern.ch", "RemoteCondorSetup": "", "CRAB_ReqName": "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "DAG_NodesPrerun": 0, "WantRemoteSyscalls": false, "DAG_NodesQueued": 0, "DAG_NodesUnready": 0, "Owner": "uscms5616", "Requirements": "true || false && TARGET.OPSYS == \"LINUX\" && TARGET.ARCH == \"X86_64\" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory", "CRAB_JobCount": 25, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "CRAB_SplitAlgo": "
 EventBased", "DiskUsage": 1, "CRAB_MaxPost": 20, "ClusterId": 1217455, "BufferSize": 524288, "DAG_NodesFailed": 0, "MaxWallTimeMins": 1400, "CRAB_PublishGroupName": 0, "CommittedSlotTime": 0, "CRAB_SiteBlacklist": "{ \"T2_FR_CCIN2P3\",\"T1_IT_CNAF\",\"T1_ES_PIC\",\"T1_UK_RAL\",\"T2_FI_HIP\",\"T2_US_Nebraska\" }", "Args": "RunJobs.dag", "CRAB_EDMOutputFiles": "{  }", "Environment": "strcat(\"PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=\",ClusterId,\".\",ProcId,\" CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local\")", "CRAB_UserVO": "cms", "Iwd": "/data/condor_local/spool/7455/0/cluster1217455.proc0.subproc0", "QDate": 1439764883, "CurrentHosts": 0, "User": "uscms5616@cms", "StreamOut": false }
-{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1233705.0#1439964847", "PostJobPrio1": -1439209593, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.main", "PostJobPrio2": 2, "JobStartDate": 1439965560, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2800", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_KR_KNU", "MATCH_EXP_JOB_Site": "CERN", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 165965.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T1_DE_KIT,T2_UK_London_IC,T2_CH_CERN", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_CH_CERN", "RootDir": "/", "JOB_GLIDEIN_ToDie": "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/user/kbutanov.0
 3af76ad04ddc195ee96e6a5469f1bbb1777390d/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440131525, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_CH_CERN", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "LSF", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 59069, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "ce302.cern.ch:8443/cream-lsf-grid_cms", "CRAB_Workflow": "150810_122536:kbutanov_crab_25ns_
 WJetsToLNu_HT600_800", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 163084.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins3@cmsgwms-factory.fnal.gov", "BytesSent": 119952.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins3@cmsgwms-factory.fnal.gov", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements", "MATCH_GLIDEIN_SiteWMS_Queue": "grid_cms", "NumCkpts": 0, "JobFinishedH
 ookDone": 1440131525, "ImageSize": 4250000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440530096, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1439965560, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_CH_CERN", "MATCH_EXP_Used_Gatekeeper": "ce302.cern.ch:8443/cream-lsf-grid_cms", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_CH_CERN_ce302", "TerminationPending": true, "CRAB_UserHN": "kbutanov", "BlockReads": 0, "DAGManJobId": 1035690, "MATCH_GLIDEIN_SEs": "srm-eoscms.cern.ch", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509UserProxyExpiration,\"Removed job due to proxy expiration\",\"Removed due to job being held\")
 )))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_CH_CERN_ce302", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440530096", "LastJobLeaseRenewal": 1440131524, "AcctGroupUser": "uscms5111", "MATCH_EXP_JOB_GLIDEIN_Factory": "gfactory_service", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.53", "x509UserProxyEmail": "khakimjan.butanov@cern.ch", "CRAB_localOutputFiles": "stepB_MC.root=stepB_MC_53.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "gfactory_service", "accounting_group": "analysis", "DAGNodeName": "Job53", "PeriodicRelease": "( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode
  == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "59069", "MATCH_GLIDEIN_MaxMemMBs": 2800, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM#85bfee36-3b82-11e5-be34-001e67abf518", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESIRED_OpSyses": "LINUX", "DAGManNodesLog": "/data/condor_local/spool/5690/0/clust
 er1035690.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms5111", "PreJobPrio1": 1, "DiskUsage": 75000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 2800, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_CH_CERN", "Iwd": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 165949, "SubmitEventNotes": "DAG Node: Job53", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov", "CRAB_TFileOutputFil
 es": "{ \"stepB_MC.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16275, "StartdPrincipal": "execute-side@matchsession/128.142.45.103", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2800, "MATCH_GLIDEIN_SiteWMS_Slot": "Unknown", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 2800, "JobCurrentStartDate": 1439965560, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/log/cmsRun_53.log.tar.gz, srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/La
 tinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/stepB_MC_53.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440397268, "x509userproxy": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/8123da6528ec4abd24562a99b4f2b0ec556bed0b", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "LSF", "NumRestarts": 0, "DiskUsage_RAW": 61434, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "689255460", "ResidentSetSize_RAW": 1238992, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "/data/condor_local/spool/5690/0/cluster1035690.proc0
 .subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.53", "ImageSize_RAW": 4095188, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2800", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "1", "CRAB_BlacklistT1": 0, "Err": "job_err.53", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 2, "NumJobReconnects": 2, "SpooledOutputFiles": "jobReport.json.53", "MATCH_GLIDEIN_Site": "CERN", "BlockWriteKbytes": 0, "SpoolOnEvict": false, "WhenToTransferOutput":
  "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.main", "JobCurrentStartExecutingDate": 1439965573, "MATCH_GLIDEIN_ProcId": 1, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_25ns_WJetsToLNu_HT600_800-9da7f68dc2032d8626d7e7822bb10506", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/68/uscms5111/150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800", "BlockReadKbytes": 0, "AccountingGroup": "analysis.kbutanov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440564896", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "CERN", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 165965, "CRAB_Retry": 2, "LastSuspensionTime": 0, "MATCH_EXP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "Cum
 ulativeSlotTime": 165965.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 2128005.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "Unknown", "JobRunCount": 1, "LastRemoteHost": "glidein_9757_931570227@b635ef6906.cern.ch", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "689255460", "RemoteSysCpu": 1963.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7", "DAGParentNodeNames": "", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)", "CompletionDate": 1440131525, "CRAB_RestHost": "cmsweb.cern.ch", "MATCH_EXP_
 JOB_GLIDEIN_SiteWMS_Queue": "grid_cms", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1233705, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 165965.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440564896, "LastPublicClaimId": "<128.142.45.103:55332>#1439963327#3#...", "CurrentHosts": 0, "QDate": 1439964847, "Arguments": "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=53 
 --cmsswVersion=CMSSW_7_4_7 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v2/50000/6E2F932B-633B-E511-A7AE-F04DA23BCE4C.root\"] --runAndLumis=job_lumis_53.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm-eoscms.cern.ch", "CRAB_Id": 53, "User": "uscms5111@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }
-{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1235991.0#1440081300", "PostJobPrio1": -1439550850, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "PostJobPrio2": 3, "JobStartDate": 1440081527, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2500", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "MATCH_EXP_JOB_Site": "Nebraska", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 31976.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_US_Nebraska", "RootDir": "/", "JOB_GLIDEIN_ToDie": 
 "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440113503, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_US_Nebraska", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "HTCondor", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 3043383, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "red-gw1.unl.edu red-gw1.u
 nl.edu:9619", "CRAB_Workflow": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 27257.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins5@gfactory-1.t2.ucsd.edu", "BytesSent": 604821.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins5@gfactory-1.t2.ucsd.edu", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requireme
 nts", "MATCH_GLIDEIN_SiteWMS_Queue": "red-gw1.unl.edu", "NumCkpts": 0, "JobFinishedHookDone": 1440113503, "ImageSize": 2000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440630710, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1440081527, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_US_Nebraska", "MATCH_EXP_Used_Gatekeeper": "red-gw1.unl.edu red-gw1.unl.edu:9619", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_gw1_long", "TerminationPending": true, "CRAB_UserHN": "mrodozov", "BlockReads": 0, "DAGManJobId": 1183604, "MATCH_GLIDEIN_SEs": "srm.unl.edu", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509Use
 rProxyExpiration,\"Removed job due to proxy expiration\",\"Removed due to job being held\"))))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_gw1_long", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440630710", "LastJobLeaseRenewal": 1440113502, "AcctGroupUser": "uscms3850", "MATCH_EXP_JOB_GLIDEIN_Factory": "SDSC", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.3", "x509UserProxyEmail": "mircho.nikolaev.rodozov@cern.ch", "CRAB_localOutputFiles": "results.root=results_3.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "SDSC", "accounting_group": "analysis", "DAGNodeName": "Job3", "PeriodicRele
 ase": "( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "3043383", "MATCH_GLIDEIN_MaxMemMBs": 2500, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESI
 RED_OpSyses": "LINUX", "DAGManNodesLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms3850", "PreJobPrio1": 0, "DiskUsage": 4250000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 1400, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_US_Nebraska", "Iwd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 31968, "SubmitEventNotes": "DAG Node: Job3", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organ
 ic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "CRAB_TFileOutputFiles": "{ \"results.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16278, "StartdPrincipal": "execute-side@matchsession/129.93.183.127", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2500, "MATCH_GLIDEIN_SiteWMS_Slot": "slot1_32@red-d23n7.unl.edu", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 1400, "JobCurrentStartDate": 1440081527, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_3.log.tar.gz, srm://srm-eoscms.cern.ch:
 8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_3.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440171330, "x509userproxy": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "HTCondor", "NumRestarts": 0, "DiskUsage_RAW": 4111436, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "5096573.0", "ResidentSetSize_RAW": 1174388, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.
 ch/crabcache", "Cmd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.3", "ImageSize_RAW": 1756756, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2500", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "14", "CRAB_BlacklistT1": 0, "Err": "job_err.3", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 42, "SpooledOutputFiles": "jobReport.json.3", "MATCH_GLIDEIN_Site": "Nebraska", "BlockWriteKby
 tes": 0, "SpoolOnEvict": false, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "JobCurrentStartExecutingDate": 1440081533, "MATCH_GLIDEIN_ProcId": 14, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "BlockReadKbytes": 0, "AccountingGroup": "analysis.mrodozov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440665510", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "Nebraska", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 31976, "CRAB_Retry": 3,
  "LastSuspensionTime": 0, "MATCH_EXP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "CumulativeSlotTime": 31976.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 4.4879356E7d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "slot1_32@red-d23n7.unl.edu", "JobRunCount": 1, "LastRemoteHost": "glidein_11321_920434792@red-d23n7.unl.edu", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "5096573.0", "RemoteSysCpu": 621.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7_patch2", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)"
 , "CompletionDate": 1440113503, "CRAB_RestHost": "cmsweb.cern.ch", "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue": "red-gw1.unl.edu", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1235991, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 31976.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440665510, "LastPublicClaimId": "<129.93.183.127:56441>#1440063351#7#...", "CurrentHosts": 0, "QDate": 1440081300, "A
 rguments": "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=3 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/06DE7D5F-D4FB-E411-9C85-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E29E093E-54FC-E411-8AE5-0025905A60FE.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/F0FDF730-EDFB-E411-842B-00261834B51D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/FECCF363-F5FB-E411-85A3-002590DBDFE0.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/0E4CEBFE-ECFB-E411-9F0C-842B2B29273C.root\",' '\"/store/mc
 /RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/ECF66DCC-F0FB-E411-84CF-00259074AE32.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/96F29C69-D4FB-E411-9028-842B2B292627.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/6E887F0F-EDFB-E411-875B-BCAEC54B303A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/0C788712-F5FB-E411-AA0E-AC853D9DAC29.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/723A41AE-F4FB-E411-BAA3-0025905C431A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/DA4EA0F5-F4FB-E411-B2AD-00259073E31C.root\",' '\"/store/mc/RunIISpring1
 5DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/98C8F097-F7FB-E411-9A1F-52540006FB8D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/DE4F8235-5FFC-E411-80CD-0025905A6088.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/EA5D6151-F5FB-E411-99F0-0026B92E0C74.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/205D0CF9-F4FB-E411-934D-000F532734AC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/24BCAED9-F0FB-E411-A35B-00259074AE54.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/A4C160C1-F4FB-E411-A66D-B083FED76C6C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_
 300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E86B536C-54FC-E411-8787-AC853D9DACE1.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/2E68E42D-EDFB-E411-8027-001E67397CC9.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/A056D12B-EDFB-E411-9E51-52540006FDD6.root\"] --runAndLumis=job_lumis_3.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm.unl.edu", "CRAB_Id": 3, "User": "uscms3850@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }
-{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1235992.0#1440081300", "PostJobPrio1": -1439550850, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "PostJobPrio2": 3, "JobStartDate": 1440081782, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2500", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "MATCH_EXP_JOB_Site": "Nebraska", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 33360.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_US_Nebraska", "RootDir": "/", "JOB_GLIDEIN_ToDie": 
 "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440115142, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_US_Nebraska", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "HTCondor", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 2561111, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "red.unl.edu red.unl.edu:9
 619", "CRAB_Workflow": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 28513.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins6@glidein.grid.iu.edu", "BytesSent": 597241.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins6@glidein.grid.iu.edu", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements", "MATCH_G
 LIDEIN_SiteWMS_Queue": "red.unl.edu", "NumCkpts": 0, "JobFinishedHookDone": 1440115142, "ImageSize": 1750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440616411, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1440081782, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_US_Nebraska", "MATCH_EXP_Used_Gatekeeper": "red.unl.edu red.unl.edu:9619", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_long", "TerminationPending": true, "CRAB_UserHN": "mrodozov", "BlockReads": 0, "DAGManJobId": 1183604, "MATCH_GLIDEIN_SEs": "srm.unl.edu", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509UserProxyExpiration,\"Removed job
  due to proxy expiration\",\"Removed due to job being held\"))))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_long", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440616411", "LastJobLeaseRenewal": 1440115142, "AcctGroupUser": "uscms3850", "MATCH_EXP_JOB_GLIDEIN_Factory": "OSGGOC", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.4", "x509UserProxyEmail": "mircho.nikolaev.rodozov@cern.ch", "CRAB_localOutputFiles": "results.root=results_4.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "OSGGOC", "accounting_group": "analysis", "DAGNodeName": "Job4", "PeriodicRelease": "( HoldReasonCode == 28 
 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "2561111", "MATCH_GLIDEIN_MaxMemMBs": 2500, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESIRED_OpSyses": "LINUX", "DAGMan
 NodesLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms3850", "PreJobPrio1": 0, "DiskUsage": 3750000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 1400, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_US_Nebraska", "Iwd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 33352, "SubmitEventNotes": "DAG Node: Job4", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/
 CN=692532/CN=Mircho Nikolaev Rodozov", "CRAB_TFileOutputFiles": "{ \"results.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16278, "StartdPrincipal": "execute-side@matchsession/129.93.182.12", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2500, "MATCH_GLIDEIN_SiteWMS_Slot": "slot1_6@red-d8n12.unl.edu", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 1400, "JobCurrentStartDate": 1440081782, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_4.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/
 store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_4.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440171330, "x509userproxy": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "HTCondor", "NumRestarts": 0, "DiskUsage_RAW": 3661158, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "5092137.0", "ResidentSetSize_RAW": 1148372, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "/data/con
 dor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.4", "ImageSize_RAW": 1727056, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2500", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "8", "CRAB_BlacklistT1": 0, "Err": "job_err.4", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 42, "SpooledOutputFiles": "jobReport.json.4", "MATCH_GLIDEIN_Site": "Nebraska", "BlockWriteKbytes": 0, "SpoolOnEvict": false, "
 WhenToTransferOutput": "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "JobCurrentStartExecutingDate": 1440081789, "MATCH_GLIDEIN_ProcId": 8, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "BlockReadKbytes": 0, "AccountingGroup": "analysis.mrodozov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440651211", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "Nebraska", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 33360, "CRAB_Retry": 3, "LastSuspensionTime": 0, "MATCH_E
 XP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "CumulativeSlotTime": 33360.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 4.4879356E7d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "slot1_6@red-d8n12.unl.edu", "JobRunCount": 1, "LastRemoteHost": "glidein_1936_57194584@red-d8n12.unl.edu", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "5092137.0", "RemoteSysCpu": 616.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7_patch2", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)", "CompletionDate": 1440115142, "CRAB
 _RestHost": "cmsweb.cern.ch", "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue": "red.unl.edu", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1235992, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 33360.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440651211, "LastPublicClaimId": "<129.93.182.12:42491>#1440048812#7#...", "CurrentHosts": 0, "QDate": 1440081300, "Arguments": "-a sandbox.tar.gz --sourceURL=
 https://cmsweb.cern.ch/crabcache --jobNumber=4 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9A89CA60-69FC-E411-9661-0025905C42B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/34F8B66A-D4FB-E411-8F89-842B2B29273C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/7CE6B848-F5FB-E411-A605-0025905A60A8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9E842AA8-54FC-E411-8BC7-000F53273500.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/C44AD465-D4FB-E411-8704-002590200A40.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUE
 TP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/90B6CB1B-07FD-E411-BD52-001E67397CBA.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/183FB65F-69FC-E411-A5A8-0025904B7C26.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/5A0A9A0E-EDFB-E411-B95F-00266CF330B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/3E3768F1-61FC-E411-B163-002618943956.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/08DB9DDE-F4FB-E411-9BC9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/CE293F9B-54FC-E411-83E8-AC853D9DACD3.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_p
 ythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/A4479F5F-69FC-E411-B0B5-0025904C6378.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/0419455F-D4FB-E411-AEFA-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E6BD6C76-54FC-E411-A1F2-AC853D9DACD7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/AC15F863-F5FB-E411-8F07-002590DB9286.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/CC9B7EE2-F4FB-E411-BCD9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/5844575F-D4FB-E411-81F5-003048FFD732.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAO
 DSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/6EC5205E-D4FB-E411-9885-001E67396BB7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/B63200E8-69FC-E411-B949-0025904C51FC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/14554A42-54FC-E411-86D2-0025905A605E.root\"] --runAndLumis=job_lumis_4.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm.unl.edu", "CRAB_Id": 4, "User": "uscms3850@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm b/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm
deleted file mode 100644
index 8e357b8..0000000
--- a/asterix-app/src/test/resources/runtimets/results/global-aggregate/q01/q01.1.adm
+++ /dev/null
@@ -1 +0,0 @@
-{ "count": 10 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm b/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm
deleted file mode 100644
index 0d74afe..0000000
--- a/asterix-app/src/test/resources/runtimets/results/global-aggregate/q02/q02.1.adm
+++ /dev/null
@@ -1 +0,0 @@
-{ "foo": 1, "count": 10 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm b/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm
deleted file mode 100644
index e45bb55..0000000
--- a/asterix-app/src/test/resources/runtimets/results/global-aggregate/q08/q08.1.adm
+++ /dev/null
@@ -1,10 +0,0 @@
-{ "count": 4 }
-{ "count": 2 }
-{ "count": 4 }
-{ "count": 1 }
-{ "count": 3 }
-{ "count": 3 }
-{ "count": 1 }
-{ "count": 1 }
-{ "count": 2 }
-{ "count": 3 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm b/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm
deleted file mode 100644
index e37d32a..0000000
--- a/asterix-app/src/test/resources/runtimets/results/temp-dataset/temp_primary_plus_ngram_flush/temp_primary_plus_ngram_flush.3.adm
+++ /dev/null
@@ -1 +0,0 @@
-1000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm b/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm
deleted file mode 100644
index cebc6b5..0000000
--- a/asterix-app/src/test/resources/runtimets/results/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.2.adm
+++ /dev/null
@@ -1 +0,0 @@
-{ "count": 116 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast b/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast
deleted file mode 100644
index 420bd44..0000000
--- a/asterix-app/src/test/resources/runtimets/results_parser_sqlpp/tpch/q06_forecast_revenue_change/q06_forecast_revenue_change.4.ast
+++ /dev/null
@@ -1,73 +0,0 @@
-DataverseUse tpch
-Query:
-SELECT [
-FunctionCall tpch.count@1[
-  (
-    SELECT ELEMENT [
-    FieldAccessor [
-      Variable [ Name=#2 ]
-      Field=l
-    ]
-    ]
-    FROM [      Variable [ Name=#1 ]
-      AS
-      Variable [ Name=#2 ]
-    ]
-  )
-]
-count
-]
-FROM [  FunctionCall Metadata.dataset@1[
-    LiteralExpr [STRING] [LineItem]
-  ]
-  AS
-  Variable [ Name=$l ]
-]
-Where
-  OperatorExpr [
-    OperatorExpr [
-      FieldAccessor [
-        Variable [ Name=$l ]
-        Field=l_shipdate
-      ]
-      >=
-      LiteralExpr [STRING] [1994-01-01]
-    ]
-    and
-    OperatorExpr [
-      FieldAccessor [
-        Variable [ Name=$l ]
-        Field=l_shipdate
-      ]
-      <
-      LiteralExpr [STRING] [1995-01-01]
-    ]
-    and
-    OperatorExpr [
-      FieldAccessor [
-        Variable [ Name=$l ]
-        Field=l_discount
-      ]
-      >=
-      LiteralExpr [DOUBLE] [0.05]
-    ]
-    and
-    OperatorExpr [
-      FieldAccessor [
-        Variable [ Name=$l ]
-        Field=l_discount
-      ]
-      <=
-      LiteralExpr [DOUBLE] [0.07]
-    ]
-    and
-    OperatorExpr [
-      FieldAccessor [
-        Variable [ Name=$l ]
-        Field=l_quantity
-      ]
-      <
-      LiteralExpr [LONG] [24]
-    ]
-  ]
-Group All

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java
deleted file mode 100644
index 8b014ad..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IStreamNotificationHandler.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.api;
-
-public interface IStreamNotificationHandler {
-
-    /**
-     * Used to notify a handler that the stream is about to start reading data from a new source.
-     * An example use is by the parser to skip CSV file headers in case the stream reads from a set of files.
-     */
-    public void notifyNewSource();
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java
deleted file mode 100644
index 6eee892..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/IndexingStreamRecordReader.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record.reader;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.asterix.external.api.IExternalIndexer;
-import org.apache.asterix.external.api.IIndexingDatasource;
-import org.apache.asterix.external.api.IRawRecord;
-import org.apache.asterix.external.api.IRecordReader;
-import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
-import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.external.input.record.reader.stream.StreamRecordReader;
-import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.mapred.RecordReader;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public class IndexingStreamRecordReader implements IRecordReader<char[]>, IIndexingDatasource {
-
-    private StreamRecordReader reader;
-    private IExternalIndexer indexer;
-
-    public IndexingStreamRecordReader(StreamRecordReader reader, IExternalIndexer indexer) {
-        this.reader = reader;
-        this.indexer = indexer;
-    }
-
-    @Override
-    public void close() throws IOException {
-        reader.close();
-    }
-
-    @Override
-    public IExternalIndexer getIndexer() {
-        return indexer;
-    }
-
-    @Override
-    public boolean hasNext() throws Exception {
-        return reader.hasNext();
-    }
-
-    @Override
-    public IRawRecord<char[]> next() throws IOException, InterruptedException {
-        return reader.next();
-    }
-
-    @Override
-    public boolean stop() {
-        return reader.stop();
-    }
-
-    @Override
-    public void setController(AbstractFeedDataFlowController controller) {
-        reader.setController(controller);
-    }
-
-    @Override
-    public void setFeedLogManager(FeedLogManager feedLogManager) throws HyracksDataException {
-        reader.setFeedLogManager(feedLogManager);
-    }
-
-    @Override
-    public List<ExternalFile> getSnapshot() {
-        return null;
-    }
-
-    @Override
-    public int getCurrentSplitIndex() {
-        return -1;
-    }
-
-    @Override
-    public RecordReader<?, ? extends Writable> getReader() {
-        return null;
-    }
-
-    @Override
-    public boolean handleException(Throwable th) {
-        return reader.handleException(th);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java
deleted file mode 100644
index 7dc5bce..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReader.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record.reader.stream;
-
-import java.io.IOException;
-
-import org.apache.asterix.external.api.AsterixInputStream;
-import org.apache.asterix.external.api.IRawRecord;
-import org.apache.asterix.external.api.IRecordReader;
-import org.apache.asterix.external.api.IStreamNotificationHandler;
-import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
-import org.apache.asterix.external.input.record.CharArrayRecord;
-import org.apache.asterix.external.input.stream.AsterixInputStreamReader;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.commons.lang.mutable.MutableBoolean;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public abstract class StreamRecordReader implements IRecordReader<char[]>, IStreamNotificationHandler {
-    protected final AsterixInputStreamReader reader;
-    protected CharArrayRecord record;
-    protected char[] inputBuffer;
-    protected int bufferLength = 0;
-    protected int bufferPosn = 0;
-    protected boolean done = false;
-    protected FeedLogManager feedLogManager;
-    protected MutableBoolean newFile = new MutableBoolean(false);
-
-    public StreamRecordReader(AsterixInputStream inputStream) {
-        this.reader = new AsterixInputStreamReader(inputStream);
-        record = new CharArrayRecord();
-        inputBuffer = new char[ExternalDataConstants.DEFAULT_BUFFER_SIZE];
-    }
-
-    @Override
-    public IRawRecord<char[]> next() throws IOException {
-        return record;
-    }
-
-    @Override
-    public void close() throws IOException {
-        if (!done) {
-            reader.close();
-        }
-        done = true;
-    }
-
-    @Override
-    public boolean stop() {
-        try {
-            reader.stop();
-            return true;
-        } catch (Exception e) {
-            e.printStackTrace();
-            return false;
-        }
-    }
-
-    @Override
-    public abstract boolean hasNext() throws IOException;
-
-    @Override
-    public void setFeedLogManager(FeedLogManager feedLogManager) throws HyracksDataException {
-        this.feedLogManager = feedLogManager;
-        reader.setFeedLogManager(feedLogManager);
-    }
-
-    @Override
-    public void setController(AbstractFeedDataFlowController controller) {
-        reader.setController(controller);
-    }
-
-    @Override
-    public boolean handleException(Throwable th) {
-        return reader.handleException(th);
-    }
-
-    @Override
-    public void notifyNewSource() {
-        throw new UnsupportedOperationException();
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
deleted file mode 100644
index f743a3f..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.input.record.reader.stream;
-
-import java.util.Map;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IInputStreamFactory;
-import org.apache.asterix.external.api.IRecordReader;
-import org.apache.asterix.external.api.IRecordReaderFactory;
-import org.apache.asterix.external.provider.StreamRecordReaderProvider;
-import org.apache.asterix.external.provider.StreamRecordReaderProvider.Format;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public class StreamRecordReaderFactory implements IRecordReaderFactory<char[]> {
-
-    private static final long serialVersionUID = 1L;
-    protected final IInputStreamFactory streamFactory;
-    protected Map<String, String> configuration;
-    protected Format format;
-
-    public StreamRecordReaderFactory(IInputStreamFactory inputStreamFactory) {
-        this.streamFactory = inputStreamFactory;
-    }
-
-    @Override
-    public DataSourceType getDataSourceType() {
-        return DataSourceType.RECORDS;
-    }
-
-    @Override
-    public Class<?> getRecordClass() {
-        return char[].class;
-    }
-
-    @Override
-    public AlgebricksAbsolutePartitionConstraint getPartitionConstraint() throws AsterixException {
-        return streamFactory.getPartitionConstraint();
-    }
-
-    @Override
-    public void configure(Map<String, String> configuration) throws AsterixException {
-        this.configuration = configuration;
-        streamFactory.configure(configuration);
-        format = StreamRecordReaderProvider.getReaderFormat(configuration);
-    }
-
-    @Override
-    public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
-            throws HyracksDataException {
-        return StreamRecordReaderProvider.createRecordReader(format, streamFactory.createInputStream(ctx, partition),
-                configuration);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/d630d1a2/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
deleted file mode 100644
index ea8bc98..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.provider;
-
-import java.util.Map;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.AsterixInputStream;
-import org.apache.asterix.external.input.record.reader.stream.EmptyLineSeparatedRecordReader;
-import org.apache.asterix.external.input.record.reader.stream.LineRecordReader;
-import org.apache.asterix.external.input.record.reader.stream.QuotedLineRecordReader;
-import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReader;
-import org.apache.asterix.external.input.record.reader.stream.StreamRecordReader;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.external.util.ExternalDataUtils;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-
-public class StreamRecordReaderProvider {
-    public enum Format {
-        SEMISTRUCTURED,
-        CSV,
-        LINE_SEPARATED
-    }
-
-    public static Format getReaderFormat(Map<String, String> configuration) throws AsterixException {
-        String format = configuration.get(ExternalDataConstants.KEY_FORMAT);
-        if (format != null) {
-            switch (format) {
-                case ExternalDataConstants.FORMAT_ADM:
-                case ExternalDataConstants.FORMAT_JSON:
-                case ExternalDataConstants.FORMAT_SEMISTRUCTURED:
-                    return Format.SEMISTRUCTURED;
-                case ExternalDataConstants.FORMAT_LINE_SEPARATED:
-                    return Format.LINE_SEPARATED;
-                case ExternalDataConstants.FORMAT_DELIMITED_TEXT:
-                case ExternalDataConstants.FORMAT_CSV:
-                    return Format.CSV;
-            }
-            throw new AsterixException("Unknown format: " + format);
-        }
-        throw new AsterixException("Unspecified paramter: " + ExternalDataConstants.KEY_FORMAT);
-    }
-
-    public static StreamRecordReader createRecordReader(Format format, AsterixInputStream inputStream,
-            Map<String, String> configuration) throws HyracksDataException {
-        switch (format) {
-            case CSV:
-                String quoteString = configuration.get(ExternalDataConstants.KEY_QUOTE);
-                boolean hasHeader = ExternalDataUtils.hasHeader(configuration);
-                if (quoteString != null) {
-                    return new QuotedLineRecordReader(hasHeader, inputStream, quoteString);
-                } else {
-                    return new LineRecordReader(hasHeader, inputStream);
-                }
-            case LINE_SEPARATED:
-                return new EmptyLineSeparatedRecordReader(inputStream);
-            case SEMISTRUCTURED:
-                return new SemiStructuredRecordReader(inputStream,
-                        configuration.get(ExternalDataConstants.KEY_RECORD_START),
-                        configuration.get(ExternalDataConstants.KEY_RECORD_END));
-            default:
-                throw new HyracksDataException("Unknown format: " + format);
-        }
-    }
-}