Posted to dev@drill.apache.org by "Khurram Faraaz (JIRA)" <ji...@apache.org> on 2018/07/11 07:34:00 UTC

[jira] [Created] (DRILL-6590) DATA_WRITE ERROR: Hash Join failed to write to output file: /tmp/drill/spill/24bac407

Khurram Faraaz created DRILL-6590:
-------------------------------------

             Summary: DATA_WRITE ERROR: Hash Join failed to write to output file: /tmp/drill/spill/24bac407
                 Key: DRILL-6590
                 URL: https://issues.apache.org/jira/browse/DRILL-6590
             Project: Apache Drill
          Issue Type: Bug
          Components: Execution - Flow
    Affects Versions: 1.14.0
            Reporter: Khurram Faraaz


Apache Drill 1.14.0 git.commit.id.abbrev=eb946b0

There was enough free space on /tmp, yet the Hash Join failed to write to its spill file:
{noformat}
[test@qa102-45 drill-1.14.0]# clush -a df -h /tmp
<IPADDRESS>: Filesystem Size Used Avail Use% Mounted on
<IPADDRESS>: /dev/mapper/vg_root-lv_root 500G 150G 351G 30% /
<IPADDRESS>: Filesystem Size Used Avail Use% Mounted on
<IPADDRESS>: /dev/mapper/vg_root-lv_root 500G 17G 484G 4% /
<IPADDRESS>: Filesystem Size Used Avail Use% Mounted on
<IPADDRESS>: /dev/mapper/vg_root-lv_root 500G 14G 487G 3% /
<IPADDRESS>: Filesystem Size Used Avail Use% Mounted on
<IPADDRESS>: /dev/mapper/vg_root-lv_root 500G 13G 488G 3% /
{noformat}
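Note that the nested exception in the trace below is java.nio.channels.ClosedByInterruptException rather than a "no space left on device" IOException, which is consistent with the df output: the spill write appears to have failed because the writing thread was interrupted (e.g. fragment cancellation), not because /tmp was full. The following minimal sketch (not Drill code; the file name and class are made up for illustration) shows how a FileChannel, being an interruptible channel, raises exactly this exception when the thread doing the write is interrupted:

{noformat}
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedByInterruptException;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class InterruptedSpillWrite {
  public static void main(String[] args) throws Exception {
    Path spill = Files.createTempFile("spill_demo", ".bin");

    Thread writer = new Thread(() -> {
      ByteBuffer buf = ByteBuffer.allocate(1 << 20); // 1 MiB per write
      try (FileChannel ch = FileChannel.open(spill,
          StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)) {
        while (true) {                          // keep writing until interrupted
          buf.clear();
          ch.write(buf);
          if (ch.position() > (64L << 20)) {    // keep the demo file small
            ch.position(0);
          }
        }
      } catch (ClosedByInterruptException e) {
        // Same exception class as reported in DRILL-6590: the channel is
        // closed because the writing thread was interrupted, regardless of
        // how much disk space is available.
        System.out.println("write failed: " + e);
      } catch (IOException e) {
        System.out.println("other I/O failure: " + e);
      }
    });

    writer.start();
    Thread.sleep(100);   // let a few writes happen
    writer.interrupt();  // simulate e.g. a fragment cancellation
    writer.join();
    Files.deleteIfExists(spill);
  }
}
{noformat}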

Stack trace from drillbit.log:
{noformat}
2018-07-10 18:17:51,953 [BitServer-10] WARN o.a.d.exec.rpc.control.WorkEventBus - A fragment message arrived but there was no registered listener for that message: profile {
 state: FAILED
 error {
 error_id: "6e258de2-2d4f-4b48-967d-df1b329955cd"
 endpoint {
 address: "qa102-48.qa.lab"
 user_port: 31010
 control_port: 31011
 data_port: 31012
 version: "1.14.0-SNAPSHOT"
 state: STARTUP
 }
 error_type: DATA_WRITE
 message: "DATA_WRITE ERROR: Hash Join failed to write to output file: /tmp/drill/spill/24bac407-2adb-5763-ed08-cb5714dca2c0_HashJoin_4-22-53/spill15_outer\n\nFragment 4:53\n\n[Error Id: 6e258de2-2d4f-4b48-967d-df1b329955cd on qa102-48.qa.lab:31010]"
 exception {
 exception_class: "java.nio.channels.ClosedByInterruptException"
 stack_trace {
 class_name: "..."
 line_number: 0
 method_name: "..."
 is_native_method: false
 }
 stack_trace {
 class_name: "com.google.protobuf.CodedOutputStream"
 file_name: "CodedOutputStream.java"
 line_number: 833
 method_name: "refreshBuffer"
 is_native_method: false
 }
 stack_trace {
 class_name: "com.google.protobuf.CodedOutputStream"
 file_name: "CodedOutputStream.java"
 line_number: 843
 method_name: "flush"
 is_native_method: false
 }
 stack_trace {
 class_name: "com.google.protobuf.AbstractMessageLite"
 file_name: "AbstractMessageLite.java"
 line_number: 91
 method_name: "writeDelimitedTo"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.cache.VectorSerializer$Writer"
 file_name: "VectorSerializer.java"
 line_number: 97
 method_name: "write"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.common.HashPartition"
 file_name: "HashPartition.java"
 line_number: 346
 method_name: "spillThisPartition"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.common.HashPartition"
 file_name: "HashPartition.java"
 line_number: 263
 method_name: "completeABatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.common.HashPartition"
 file_name: "HashPartition.java"
 line_number: 237
 method_name: "completeAnOuterBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.common.HashPartition"
 file_name: "HashPartition.java"
 line_number: 232
 method_name: "appendOuterRow"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.test.generated.HashJoinProbeGen49"
 file_name: "HashJoinProbeTemplate.java"
 line_number: 306
 method_name: "executeProbePhase"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.test.generated.HashJoinProbeGen49"
 file_name: "HashJoinProbeTemplate.java"
 line_number: 393
 method_name: "probeAndProject"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 357
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 172
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 274
 method_name: "sniffNonEmptyBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 236
 method_name: "prefetchFirstBatchFromBothSides"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 216
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 274
 method_name: "sniffNonEmptyBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 236
 method_name: "prefetchFirstBatchFromBothSides"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 216
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 274
 method_name: "sniffNonEmptyBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 236
 method_name: "prefetchFirstBatchFromBothSides"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 216
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 274
 method_name: "sniffNonEmptyBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 236
 method_name: "prefetchFirstBatchFromBothSides"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 216
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 274
 method_name: "sniffNonEmptyBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 236
 method_name: "prefetchFirstBatchFromBothSides"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 216
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 109
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractUnaryRecordBatch"
 file_name: "AbstractUnaryRecordBatch.java"
 line_number: 63
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.project.ProjectRecordBatch"
 file_name: "ProjectRecordBatch.java"
 line_number: 147
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 172
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 109
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractUnaryRecordBatch"
 file_name: "AbstractUnaryRecordBatch.java"
 line_number: 63
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 172
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 109
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractUnaryRecordBatch"
 file_name: "AbstractUnaryRecordBatch.java"
 line_number: 63
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 172
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 274
 method_name: "sniffNonEmptyBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 236
 method_name: "prefetchFirstBatchFromBothSides"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 216
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 274
 method_name: "sniffNonEmptyBatch"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 236
 method_name: "prefetchFirstBatchFromBothSides"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.join.HashJoinBatch"
 file_name: "HashJoinBatch.java"
 line_number: 216
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 109
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractUnaryRecordBatch"
 file_name: "AbstractUnaryRecordBatch.java"
 line_number: 63
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.project.ProjectRecordBatch"
 file_name: "ProjectRecordBatch.java"
 line_number: 147
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 172
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 109
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.aggregate.HashAggBatch"
 file_name: "HashAggBatch.java"
 line_number: 199
 method_name: "buildSchema"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 152
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 119
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 109
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractUnaryRecordBatch"
 file_name: "AbstractUnaryRecordBatch.java"
 line_number: 63
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.project.ProjectRecordBatch"
 file_name: "ProjectRecordBatch.java"
 line_number: 147
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.record.AbstractRecordBatch"
 file_name: "AbstractRecordBatch.java"
 line_number: 172
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.BaseRootExec"
 file_name: "BaseRootExec.java"
 line_number: 103
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.SingleSenderCreator$SingleSenderRootExec"
 file_name: "SingleSenderCreator.java"
 line_number: 93
 method_name: "innerNext"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.physical.impl.BaseRootExec"
 file_name: "BaseRootExec.java"
 line_number: 93
 method_name: "next"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.work.fragment.FragmentExecutor$1"
 file_name: "FragmentExecutor.java"
 line_number: 294
 method_name: "run"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.work.fragment.FragmentExecutor$1"
 file_name: "FragmentExecutor.java"
 line_number: 281
 method_name: "run"
 is_native_method: false
 }
 stack_trace {
 class_name: "..."
 line_number: 0
 method_name: "..."
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.hadoop.security.UserGroupInformation"
 file_name: "UserGroupInformation.java"
 line_number: 1595
 method_name: "doAs"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.exec.work.fragment.FragmentExecutor"
 file_name: "FragmentExecutor.java"
 line_number: 281
 method_name: "run"
 is_native_method: false
 }
 stack_trace {
 class_name: "org.apache.drill.common.SelfCleaningRunnable"
 file_name: "SelfCleaningRunnable.java"
 line_number: 38
 method_name: "run"
 is_native_method: false
 }
{noformat}


