Posted to commits@beam.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2018/05/09 03:38:51 UTC

Build failed in Jenkins: beam_PostCommit_Python_Verify #4904

See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4904/display/redirect>

------------------------------------------
[...truncated 1.27 MB...]
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: dfs.namenode.safemode.min.datanodes = 0
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension     = 30000
namenode_1  | 18/05/09 03:37:34 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.window.num.buckets = 10
namenode_1  | 18/05/09 03:37:34 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.num.users = 10
namenode_1  | 18/05/09 03:37:34 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
namenode_1  | 18/05/09 03:37:34 INFO util.GSet: Computing capacity for map NameNodeRetryCache
namenode_1  | 18/05/09 03:37:34 INFO util.GSet: VM type       = 64-bit
namenode_1  | 18/05/09 03:37:34 INFO util.GSet: 0.029999999329447746% max memory 958.5 MB = 294.5 KB
namenode_1  | 18/05/09 03:37:34 INFO util.GSet: capacity      = 2^15 = 32768 entries
namenode_1  | 18/05/09 03:37:34 INFO common.Storage: Lock on /hadoop/dfs/name/in_use.lock acquired by nodename 143@namenode
namenode_1  | 18/05/09 03:37:34 INFO namenode.FileJournalManager: Recovering unfinalized segments in /hadoop/dfs/name/current
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSImage: No edit log streams selected.
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSImage: Planning to load image: FSImageFile(file=/hadoop/dfs/name/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSImageFormatPBINode: Loading 1 INodes.
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSImageFormatProtobuf: Loaded FSImage in 0 seconds.
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSImage: Loaded image for txid 0 from /hadoop/dfs/name/current/fsimage_0000000000000000000
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSEditLog: Starting log segment at 1
datanode_1  | 18/05/09 03:37:34 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
namenode_1  | 18/05/09 03:37:34 INFO namenode.NameCache: initialized with 0 entries 0 lookups
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: Finished loading FSImage in 301 msecs
namenode_1  | 18/05/09 03:37:34 INFO namenode.NameNode: RPC server is binding to 0.0.0.0:8020
namenode_1  | 18/05/09 03:37:34 INFO ipc.CallQueueManager: Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 1000 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
namenode_1  | 18/05/09 03:37:34 INFO ipc.Server: Starting Socket Reader #1 for port 8020
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: Registered FSNamesystemState MBean
namenode_1  | 18/05/09 03:37:34 INFO namenode.LeaseManager: Number of blocks under construction: 0
namenode_1  | 18/05/09 03:37:34 INFO blockmanagement.BlockManager: initializing replication queues
namenode_1  | 18/05/09 03:37:34 INFO hdfs.StateChange: STATE* Leaving safe mode after 0 secs
namenode_1  | 18/05/09 03:37:34 INFO hdfs.StateChange: STATE* Network topology has 0 racks and 0 datanodes
namenode_1  | 18/05/09 03:37:34 INFO hdfs.StateChange: STATE* UnderReplicatedBlocks has 0 blocks
namenode_1  | 18/05/09 03:37:34 INFO blockmanagement.BlockManager: Total number of blocks            = 0
namenode_1  | 18/05/09 03:37:34 INFO blockmanagement.BlockManager: Number of invalid blocks          = 0
namenode_1  | 18/05/09 03:37:34 INFO blockmanagement.BlockManager: Number of under-replicated blocks = 0
namenode_1  | 18/05/09 03:37:34 INFO blockmanagement.BlockManager: Number of  over-replicated blocks = 0
namenode_1  | 18/05/09 03:37:34 INFO blockmanagement.BlockManager: Number of blocks being written    = 0
namenode_1  | 18/05/09 03:37:34 INFO hdfs.StateChange: STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 11 msec
namenode_1  | 18/05/09 03:37:34 INFO ipc.Server: IPC Server Responder: starting
namenode_1  | 18/05/09 03:37:34 INFO ipc.Server: IPC Server listener on 8020: starting
namenode_1  | 18/05/09 03:37:34 INFO namenode.NameNode: NameNode RPC up at: namenode/172.18.0.2:8020
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSNamesystem: Starting services required for active state
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSDirectory: Initializing quota with 4 thread(s)
namenode_1  | 18/05/09 03:37:34 INFO namenode.FSDirectory: Quota initialization completed in 4 milliseconds
namenode_1  | name space=1
namenode_1  | storage space=0
namenode_1  | storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0
namenode_1  | 18/05/09 03:37:34 INFO blockmanagement.CacheReplicationMonitor: Starting CacheReplicationMonitor with interval 30000 milliseconds
datanode_1  | 18/05/09 03:37:35 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: Acknowledging ACTIVE Namenode during handshakeBlock pool <registering> (Datanode Uuid unassigned) service to namenode/172.18.0.2:8020
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Using 1 threads to upgrade data directories (dfs.datanode.parallel.volumes.load.threads.num=1, dataDirs=1)
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Lock on /hadoop/dfs/data/in_use.lock acquired by nodename 83@datanode
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Storage directory /hadoop/dfs/data is not formatted for namespace 1999203236. Formatting...
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Generated new storageID DS-6c0fdbe7-4c77-458e-a5f1-a89a45c546ff for directory /hadoop/dfs/data
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Analyzing storage directories for bpid BP-892499668-172.18.0.2-1525837052284
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Locking is disabled for /hadoop/dfs/data/current/BP-892499668-172.18.0.2-1525837052284
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Block pool storage directory /hadoop/dfs/data/current/BP-892499668-172.18.0.2-1525837052284 is not formatted for BP-892499668-172.18.0.2-1525837052284. Formatting ...
datanode_1  | 18/05/09 03:37:35 INFO common.Storage: Formatting block pool BP-892499668-172.18.0.2-1525837052284 directory /hadoop/dfs/data/current/BP-892499668-172.18.0.2-1525837052284/current
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: Setting up storage: nsid=1999203236;bpid=BP-892499668-172.18.0.2-1525837052284;lv=-57;nsInfo=lv=-63;cid=CID-f92a0eb3-ce75-42b1-83a5-9ad6c0faa57c;nsid=1999203236;c=1525837052284;bpid=BP-892499668-172.18.0.2-1525837052284;dnuuid=null
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: Generated and persisted new Datanode UUID ea93857e-c0c6-494c-887b-ab0ab1e96b58
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Added new volume: DS-6c0fdbe7-4c77-458e-a5f1-a89a45c546ff
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Added volume - /hadoop/dfs/data/current, StorageType: DISK
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Registered FSDatasetState MBean
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Volume reference is released.
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Adding block pool BP-892499668-172.18.0.2-1525837052284
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Scanning block pool BP-892499668-172.18.0.2-1525837052284 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Time taken to scan block pool BP-892499668-172.18.0.2-1525837052284 on /hadoop/dfs/data/current: 14ms
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Total time to scan all replicas for block pool BP-892499668-172.18.0.2-1525837052284: 16ms
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Adding replicas to map for block pool BP-892499668-172.18.0.2-1525837052284 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 03:37:35 INFO impl.BlockPoolSlice: Replica Cache file: /hadoop/dfs/data/current/BP-892499668-172.18.0.2-1525837052284/current/replicas doesn't exist 
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Time to add replicas to map for block pool BP-892499668-172.18.0.2-1525837052284 on volume /hadoop/dfs/data/current: 1ms
datanode_1  | 18/05/09 03:37:35 INFO impl.FsDatasetImpl: Total time to add all replicas to map: 1ms
datanode_1  | 18/05/09 03:37:35 INFO datanode.VolumeScanner: Now scanning bpid BP-892499668-172.18.0.2-1525837052284 on volume /hadoop/dfs/data
datanode_1  | 18/05/09 03:37:35 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-6c0fdbe7-4c77-458e-a5f1-a89a45c546ff): finished scanning block pool BP-892499668-172.18.0.2-1525837052284
datanode_1  | 18/05/09 03:37:35 INFO datanode.DirectoryScanner: Periodic Directory Tree Verification scan starting at 5/9/18 9:14 AM with interval of 21600000ms
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: Block pool BP-892499668-172.18.0.2-1525837052284 (Datanode Uuid ea93857e-c0c6-494c-887b-ab0ab1e96b58) service to namenode/172.18.0.2:8020 beginning handshake with NN
datanode_1  | 18/05/09 03:37:35 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-6c0fdbe7-4c77-458e-a5f1-a89a45c546ff): no suitable block pools found to scan.  Waiting 1814399970 ms.
namenode_1  | 18/05/09 03:37:35 INFO hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(172.18.0.3:50010, datanodeUuid=ea93857e-c0c6-494c-887b-ab0ab1e96b58, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-f92a0eb3-ce75-42b1-83a5-9ad6c0faa57c;nsid=1999203236;c=1525837052284) storage ea93857e-c0c6-494c-887b-ab0ab1e96b58
namenode_1  | 18/05/09 03:37:35 INFO net.NetworkTopology: Adding a new node: /default-rack/172.18.0.3:50010
namenode_1  | 18/05/09 03:37:35 INFO blockmanagement.BlockReportLeaseManager: Registered DN ea93857e-c0c6-494c-887b-ab0ab1e96b58 (172.18.0.3:50010).
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: Block pool Block pool BP-892499668-172.18.0.2-1525837052284 (Datanode Uuid ea93857e-c0c6-494c-887b-ab0ab1e96b58) service to namenode/172.18.0.2:8020 successfully registered with NN
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: For namenode namenode/172.18.0.2:8020 using BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
namenode_1  | 18/05/09 03:37:35 INFO blockmanagement.DatanodeDescriptor: Adding new storage ID DS-6c0fdbe7-4c77-458e-a5f1-a89a45c546ff for DN 172.18.0.3:50010
namenode_1  | 18/05/09 03:37:35 INFO BlockStateChange: BLOCK* processReport 0x390ca072ee5486e1: Processing first storage report for DS-6c0fdbe7-4c77-458e-a5f1-a89a45c546ff from datanode ea93857e-c0c6-494c-887b-ab0ab1e96b58
namenode_1  | 18/05/09 03:37:35 INFO BlockStateChange: BLOCK* processReport 0x390ca072ee5486e1: from storage DS-6c0fdbe7-4c77-458e-a5f1-a89a45c546ff node DatanodeRegistration(172.18.0.3:50010, datanodeUuid=ea93857e-c0c6-494c-887b-ab0ab1e96b58, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-f92a0eb3-ce75-42b1-83a5-9ad6c0faa57c;nsid=1999203236;c=1525837052284), blocks: 0, hasStaleStorage: false, processing time: 2 msecs, invalidatedBlocks: 0
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: Successfully sent block report 0x390ca072ee5486e1,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 4 msec to generate and 52 msecs for RPC and NN processing. Got back one command: FinalizeCommand/5.
datanode_1  | 18/05/09 03:37:35 INFO datanode.DataNode: Got finalize command for block pool BP-892499668-172.18.0.2-1525837052284
test_1      | INFO	Instantiated configuration from '/app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg'.
test_1      | INFO	Instantiated <InsecureClient(url='http://namenode:50070')>.
test_1      | INFO	Uploading 'kinglear.txt' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | INFO	Listing '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Starting new HTTP connection (1): namenode
namenode_1  | May 09, 2018 3:38:19 AM com.sun.jersey.api.core.PackagesResourceConfig init
namenode_1  | INFO: Scanning for root resource and provider classes in the packages:
namenode_1  |   org.apache.hadoop.hdfs.server.namenode.web.resources
namenode_1  |   org.apache.hadoop.hdfs.web.resources
namenode_1  | May 09, 2018 3:38:19 AM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Root resource classes found:
namenode_1  |   class org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods
namenode_1  | May 09, 2018 3:38:19 AM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Provider classes found:
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.UserProvider
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.ExceptionHandler
namenode_1  | May 09, 2018 3:38:19 AM com.sun.jersey.server.impl.application.WebApplicationImpl _initiate
namenode_1  | INFO: Initiating Jersey application, version 'Jersey: 1.9 09/02/2011 11:17 AM'
namenode_1  | May 09, 2018 3:38:20 AM com.sun.jersey.spi.inject.Errors processErrorMessages
namenode_1  | WARNING: The following warnings have been detected with resource and/or provider classes:
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.postRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PostOpParam,org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.NewLengthParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.deleteRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.DeleteOpParam,org.apache.hadoop.hdfs.web.resources.RecursiveParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.putRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PutOpParam,org.apache.hadoop.hdfs.web.resources.DestinationParam,org.apache.hadoop.hdfs.web.resources.OwnerParam,org.apache.hadoop.hdfs.web.resources.GroupParam,org.apache.hadoop.hdfs.web.resources.PermissionParam,org.apache.hadoop.hdfs.web.resources.OverwriteParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ReplicationParam,org.apache.hadoop.hdfs.web.resources.BlockSizeParam,org.apache.hadoop.hdfs.web.resources.ModificationTimeParam,org.apache.hadoop.hdfs.web.resources.AccessTimeParam,org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam,org.apache.hadoop.hdfs.web.resources.CreateParentParam,org.apache.hadoop.hdfs.web.resources.TokenArgumentParam,org.apache.hadoop.hdfs.web.resources.AclPermissionParam,org.apache.hadoop.hdfs.web.resources.XAttrNameParam,org.apache.hadoop.hdfs.web.resources.XAttrValueParam,org.apache.hadoop.hdfs.web.resources.XAttrSetFlagParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam,org.apache.hadoop.hdfs.web.resources.OldSnapshotNameParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.CreateFlagParam,org.apache.hadoop.hdfs.web.resources.StoragePolicyParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.getRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.GetOpParam,org.apache.hadoop.hdfs.web.resources.OffsetParam,org.apache.hadoop.hdfs.web.resources.LengthParam,org.apache.hadoop.hdfs.web.resources.RenewerParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,java.util.List,org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.FsActionParam,org.apache.hadoop.hdfs.web.resources.TokenKindParam,org.apache.hadoop.hdfs.web.resources.TokenServiceParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
test_1      | DEBUG	http://namenode:50070 "GET /webhdfs/v1/?user.name=root&op=LISTSTATUS HTTP/1.1" 200 None
test_1      | DEBUG	Uploading 1 files using 1 thread(s).
test_1      | DEBUG	Uploading 'kinglear.txt' to '/kinglear.txt'.
test_1      | INFO	Writing to '/kinglear.txt'.
test_1      | DEBUG	Resolved path '/kinglear.txt' to '/kinglear.txt'.
test_1      | DEBUG	http://namenode:50070 "PUT /webhdfs/v1/kinglear.txt?user.name=root&overwrite=False&op=CREATE HTTP/1.1" 307 0
test_1      | DEBUG	Starting new HTTP connection (1): datanode
datanode_1  | 18/05/09 03:38:20 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/kinglear.txt?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
namenode_1  | 18/05/09 03:38:20 INFO hdfs.StateChange: BLOCK* allocate blk_1073741825_1001, replicas=172.18.0.3:50010 for /kinglear.txt
datanode_1  | 18/05/09 03:38:20 INFO datanode.DataNode: Receiving BP-892499668-172.18.0.2-1525837052284:blk_1073741825_1001 src: /172.18.0.3:56130 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 03:38:21 INFO DataNode.clienttrace: src: /172.18.0.3:56130, dest: /172.18.0.3:50010, bytes: 157283, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_-296196038_67, offset: 0, srvID: ea93857e-c0c6-494c-887b-ab0ab1e96b58, blockid: BP-892499668-172.18.0.2-1525837052284:blk_1073741825_1001, duration: 11283719
datanode_1  | 18/05/09 03:38:21 INFO datanode.DataNode: PacketResponder: BP-892499668-172.18.0.2-1525837052284:blk_1073741825_1001, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 03:38:21 INFO namenode.FSNamesystem: BLOCK* blk_1073741825_1001 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /kinglear.txt
namenode_1  | 18/05/09 03:38:21 INFO namenode.EditLogFileOutputStream: Nothing to flush
namenode_1  | 18/05/09 03:38:21 INFO hdfs.StateChange: DIR* completeFile: /kinglear.txt is closed by DFSClient_NONMAPREDUCE_-296196038_67
test_1      | DEBUG	Upload of 'kinglear.txt' to '/kinglear.txt' complete.
test_1      | INFO:root:Missing pipeline option (runner). Executing pipeline using the default runner: DirectRunner.
test_1      | INFO:root:==================== <function annotate_downstream_side_inputs at 0x7fbf21cccc80> ====================
test_1      | INFO:root:==================== <function lift_combiners at 0x7fbf21cccf50> ====================
test_1      | INFO:root:==================== <function expand_gbk at 0x7fbf21cccb18> ====================
test_1      | INFO:root:==================== <function sink_flattens at 0x7fbf21ccc9b0> ====================
test_1      | INFO:root:==================== <function greedily_fuse at 0x7fbf21ccc488> ====================
test_1      | INFO:root:==================== <function sort_stages at 0x7fbf21ccc7d0> ====================
test_1      | INFO:root:Running (((ref_AppliedPTransform_write/Write/WriteImpl/DoOnce/Read_16)+(ref_AppliedPTransform_write/Write/WriteImpl/InitializeWrite_17))+(ref_PCollection_PCollection_9/Write))+(ref_PCollection_PCollection_10/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out']>
test_1      | INFO:root:start <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7fbf21c94cd0>, start_position=None, stop_position=None)>
test_1      | INFO:root:finish <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7fbf21c94cd0>, start_position=None, stop_position=None), receivers=[ConsumerSet[write/Write/WriteImpl/DoOnce/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=2]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/InitializeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:Running (ref_AppliedPTransform_read/Read_3)+((ref_AppliedPTransform_split_4)+((ref_AppliedPTransform_pair_with_one_5)+(group/Write)))
test_1      | INFO:root:start <DataOutputOperation group/Write >
test_1      | INFO:root:start <DoOperation pair_with_one output_tags=['out']>
test_1      | INFO:root:start <DoOperation split output_tags=['out']>
test_1      | INFO:root:start <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7fbf21ca38d0>, start_position=None, stop_position=None)>
datanode_1  | 18/05/09 03:38:22 INFO datanode.webhdfs: 172.18.0.4 GET /webhdfs/v1/kinglear.txt?op=OPEN&user.name=root&namenoderpcaddress=namenode:8020&length=157284&offset=0 200
test_1      | INFO:root:finish <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7fbf21ca38d0>, start_position=None, stop_position=None), receivers=[ConsumerSet[read/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation split output_tags=['out'], receivers=[ConsumerSet[split.out0, coder=WindowedValueCoder[StrUtf8Coder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation pair_with_one output_tags=['out'], receivers=[ConsumerSet[pair_with_one.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], VarIntCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation group/Write >
test_1      | INFO:root:Running (((group/Read)+((ref_AppliedPTransform_count_10)+(ref_AppliedPTransform_format_11)))+(ref_AppliedPTransform_write/Write/WriteImpl/WriteBundles_18))+((ref_AppliedPTransform_write/Write/WriteImpl/Pair_19)+((ref_AppliedPTransform_write/Write/WriteImpl/WindowInto(WindowIntoFn)_20)+(write/Write/WriteImpl/GroupByKey/Write)))
test_1      | INFO:root:start <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Pair output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out']>
test_1      | INFO:root:start <DoOperation format output_tags=['out']>
test_1      | INFO:root:start <DoOperation count output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | WARNING:root:Mime types are not supported. Got non-default mime_type: text/plain
datanode_1  | 18/05/09 03:38:24 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/beam-temp-py-wordcount-integration-6c504a5e533a11e8823a0242ac120004/ac449561-e6d2-4fde-b06c-7e91f3114e11.py-wordcount-integration?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
test_1      | INFO:root:finish <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation count output_tags=['out'], receivers=[ConsumerSet[count.out0, coder=WindowedValueCoder[TupleCoder[StrUtf8Coder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation format output_tags=['out'], receivers=[ConsumerSet[format.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WriteBundles.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
namenode_1  | 18/05/09 03:38:24 INFO hdfs.StateChange: BLOCK* allocate blk_1073741826_1002, replicas=172.18.0.3:50010 for /beam-temp-py-wordcount-integration-6c504a5e533a11e8823a0242ac120004/ac449561-e6d2-4fde-b06c-7e91f3114e11.py-wordcount-integration
datanode_1  | 18/05/09 03:38:24 INFO datanode.DataNode: Receiving BP-892499668-172.18.0.2-1525837052284:blk_1073741826_1002 src: /172.18.0.3:56150 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 03:38:24 INFO DataNode.clienttrace: src: /172.18.0.3:56150, dest: /172.18.0.3:50010, bytes: 48944, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_972292288_69, offset: 0, srvID: ea93857e-c0c6-494c-887b-ab0ab1e96b58, blockid: BP-892499668-172.18.0.2-1525837052284:blk_1073741826_1002, duration: 3520869
datanode_1  | 18/05/09 03:38:24 INFO datanode.DataNode: PacketResponder: BP-892499668-172.18.0.2-1525837052284:blk_1073741826_1002, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 03:38:24 INFO hdfs.StateChange: DIR* completeFile: /beam-temp-py-wordcount-integration-6c504a5e533a11e8823a0242ac120004/ac449561-e6d2-4fde-b06c-7e91f3114e11.py-wordcount-integration is closed by DFSClient_NONMAPREDUCE_972292288_69
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Pair output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Pair.out0, coder=WindowedValueCoder[TupleCoder[FastPrimitivesCoder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WindowInto(WindowIntoFn).out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], LengthPrefixCoder[FastPrimitivesCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:Running (write/Write/WriteImpl/GroupByKey/Read)+((ref_AppliedPTransform_write/Write/WriteImpl/Extract_25)+(ref_PCollection_PCollection_17/Write))
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Extract output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Extract output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Extract.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:Running ((ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/PreFinalize_26))+(ref_PCollection_PCollection_18/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/PreFinalize.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:Running (ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/FinalizeWrite_27)
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:Starting finalize_write threads with num_shards: 1 (skipped: 0), batches: 1, num_threads: 1
test_1      | INFO:root:Renamed 1 shards in 0.14 seconds.
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/FinalizeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=0]]>
test_1      | INFO:root:number of empty lines: 1663
test_1      | INFO:root:average word length: 4
hdfs_it-jenkins-beam_postcommit_python_verify-4904_test_1 exited with code 0
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4904_datanode_1 ... done
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4904_namenode_1 ... done
Aborting on container exit...

real	1m16.534s
user	0m0.805s
sys	0m0.159s
:pythonPostCommit (Thread[Task worker for ':' Thread 2,5,main]) completed. Took 38 mins 27.367 secs.

BUILD SUCCESSFUL in 38m 33s
1 actionable task: 1 executed

Publishing build scan...
https://gradle.com/s/e5dpneixhplsu

Build step 'Invoke Gradle script' changed build result to SUCCESS
[Set GitHub commit status (universal)] SUCCESS on repos [GHRepository@6fe960c5[description=Apache Beam,homepage=,name=beam,fork=false,size=58754,milestones={},language=Java,commits={},source=<null>,parent=<null>,responseHeaderFields={null=[HTTP/1.1 200 OK], Access-Control-Allow-Origin=[*], Access-Control-Expose-Headers=[ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval], Cache-Control=[private, max-age=60, s-maxage=60], Content-Encoding=[gzip], Content-Security-Policy=[default-src 'none'], Content-Type=[application/json; charset=utf-8], Date=[Wed, 09 May 2018 03:38:51 GMT], ETag=[W/"f271032ca1513bfd5e28400c832a5d21"], Last-Modified=[Wed, 09 May 2018 00:26:13 GMT], OkHttp-Received-Millis=[1525837131315], OkHttp-Response-Source=[NETWORK 200], OkHttp-Selected-Protocol=[http/1.1], OkHttp-Sent-Millis=[1525837131178], Referrer-Policy=[origin-when-cross-origin, strict-origin-when-cross-origin], Server=[GitHub.com], Status=[200 OK], Strict-Transport-Security=[max-age=31536000; includeSubdomains; preload], Transfer-Encoding=[chunked], Vary=[Accept, Authorization, Cookie, X-GitHub-OTP], X-Accepted-OAuth-Scopes=[repo], X-Content-Type-Options=[nosniff], X-Frame-Options=[deny], X-GitHub-Media-Type=[github.v3; format=json], X-GitHub-Request-Id=[D330:4F42:EBFACD:1DD7C12:5AF26D1B], X-OAuth-Scopes=[admin:repo_hook, repo, repo:status], X-RateLimit-Limit=[5000], X-RateLimit-Remaining=[4995], X-RateLimit-Reset=[1525840379], X-Runtime-rack=[0.049944], X-XSS-Protection=[1; mode=block]},url=https://api.github.com/repos/apache/beam,id=50904245]] (sha:60f90c8) with context:beam_PostCommit_Python_Verify
Setting commit status on GitHub for https://github.com/apache/beam/commit/60f90c8dcb229c35a82c7be15e64a89678bae058
ERROR: Build step failed with exception
java.io.FileNotFoundException: https://api.github.com/repos/apache/beam/statuses/60f90c8dcb229c35a82c7be15e64a89678bae058
	at com.squareup.okhttp.internal.huc.HttpURLConnectionImpl.getInputStream(HttpURLConnectionImpl.java:243)
	at com.squareup.okhttp.internal.huc.DelegatingHttpsURLConnection.getInputStream(DelegatingHttpsURLConnection.java:210)
	at com.squareup.okhttp.internal.huc.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:25)
	at org.kohsuke.github.Requester.parse(Requester.java:612)
	at org.kohsuke.github.Requester.parse(Requester.java:594)
	at org.kohsuke.github.Requester._to(Requester.java:272)
Caused: org.kohsuke.github.GHFileNotFoundException: {"message":"Not Found","documentation_url":"https://developer.github.com/v3/repos/statuses/#create-a-status"}
	at org.kohsuke.github.Requester.handleApiError(Requester.java:686)
	at org.kohsuke.github.Requester._to(Requester.java:293)
	at org.kohsuke.github.Requester.to(Requester.java:234)
	at org.kohsuke.github.GHRepository.createCommitStatus(GHRepository.java:1075)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:160)
Caused: org.jenkinsci.plugins.github.common.CombineErrorHandler$ErrorHandlingException
	at org.jenkinsci.plugins.github.common.CombineErrorHandler.handle(CombineErrorHandler.java:74)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:164)
	at com.cloudbees.jenkins.GitHubCommitNotifier.perform(GitHubCommitNotifier.java:151)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1749)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Build step 'Set build status on GitHub commit [deprecated]' marked build as failure


Jenkins build is back to normal : beam_PostCommit_Python_Verify #4912

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4912/display/redirect>


Build failed in Jenkins: beam_PostCommit_Python_Verify #4911

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4911/display/redirect>

------------------------------------------
[...truncated 1.32 MB...]
                }, 
                {
                  "@type": "kind:interval_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "count.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s6"
        }, 
        "serialized_fn": "ref_AppliedPTransform_count_10", 
        "user_name": "count"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s8", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "format_result"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:interval_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "format.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "ref_AppliedPTransform_format_11", 
        "user_name": "format"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s9", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "<lambda>"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                }, 
                {
                  "@type": "kind:interval_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "WriteStringsToPubSub/EncodeString.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s8"
        }, 
        "serialized_fn": "ref_AppliedPTransform_WriteStringsToPubSub/EncodeString_13", 
        "user_name": "WriteStringsToPubSub/EncodeString"
      }
    }, 
    {
      "kind": "ParallelWrite", 
      "name": "s10", 
      "properties": {
        "display_data": [], 
        "encoding": {
          "@type": "kind:windowed_value", 
          "component_encodings": [
            {
              "@type": "kind:bytes"
            }, 
            {
              "@type": "kind:global_window"
            }
          ], 
          "is_wrapper": true
        }, 
        "format": "pubsub", 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s9"
        }, 
        "pubsub_topic": "projects/apache-beam-testing/topics/wc_topic_output50e6b5e3-6ad6-441b-96dd-ac8a24456125", 
        "user_name": "WriteStringsToPubSub/Write/NativeWrite"
      }
    }
  ], 
  "type": "JOB_TYPE_STREAMING"
}
root: INFO: Create job: <Job
 createTime: u'2018-05-09T21:30:16.857190Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2018-05-09_14_30_16-5406977071237026756'
 location: u'us-central1'
 name: u'beamapp-jenkins-0509213014-939940'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
root: INFO: Created job with id: [2018-05-09_14_30_16-5406977071237026756]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_14_30_16-5406977071237026756?project=apache-beam-testing
root: INFO: Job 2018-05-09_14_30_16-5406977071237026756 is in state JOB_STATE_RUNNING
root: INFO: 2018-05-09T21:30:18.483Z: JOB_MESSAGE_DETAILED: Checking required Cloud APIs are enabled.
root: INFO: 2018-05-09T21:30:18.851Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2018-05-09T21:30:19.214Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-b.
root: INFO: 2018-05-09T21:30:19.493Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into optimizable parts.
root: INFO: 2018-05-09T21:30:19.500Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2018-05-09T21:30:19.506Z: JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into optimizable parts.
root: INFO: 2018-05-09T21:30:19.508Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write steps
root: INFO: 2018-05-09T21:30:19.514Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
root: INFO: 2018-05-09T21:30:19.524Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2018-05-09T21:30:19.526Z: JOB_MESSAGE_DETAILED: Fusing consumer ReadStringsFromPubSub/DecodeString into ReadStringsFromPubSub/ReadFromPubSub/Read
root: INFO: 2018-05-09T21:30:19.529Z: JOB_MESSAGE_DETAILED: Fusing consumer pair_with_one into split
root: INFO: 2018-05-09T21:30:19.531Z: JOB_MESSAGE_DETAILED: Fusing consumer count into group/MergeBuckets
root: INFO: 2018-05-09T21:30:19.533Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteStringsToPubSub/Write/NativeWrite into WriteStringsToPubSub/EncodeString
root: INFO: 2018-05-09T21:30:19.535Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteStringsToPubSub/EncodeString into format
root: INFO: 2018-05-09T21:30:19.537Z: JOB_MESSAGE_DETAILED: Fusing consumer group/MergeBuckets into group/ReadStream
root: INFO: 2018-05-09T21:30:19.540Z: JOB_MESSAGE_DETAILED: Fusing consumer format into count
root: INFO: 2018-05-09T21:30:19.541Z: JOB_MESSAGE_DETAILED: Fusing consumer group/WriteStream into WindowInto(WindowIntoFn)
root: INFO: 2018-05-09T21:30:19.544Z: JOB_MESSAGE_DETAILED: Fusing consumer split into ReadStringsFromPubSub/DecodeString
root: INFO: 2018-05-09T21:30:19.546Z: JOB_MESSAGE_DETAILED: Fusing consumer WindowInto(WindowIntoFn) into pair_with_one
root: INFO: 2018-05-09T21:30:19.549Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
root: INFO: 2018-05-09T21:30:19.582Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
root: INFO: 2018-05-09T21:30:19.593Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2018-05-09T21:30:19.738Z: JOB_MESSAGE_DEBUG: Executing wait step start2
root: INFO: 2018-05-09T21:30:19.784Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
root: INFO: 2018-05-09T21:30:19.791Z: JOB_MESSAGE_BASIC: Starting 1 workers...
root: INFO: 2018-05-09T21:30:22.115Z: JOB_MESSAGE_BASIC: Executing operation ReadStringsFromPubSub/ReadFromPubSub/Read+ReadStringsFromPubSub/DecodeString+split+pair_with_one+WindowInto(WindowIntoFn)+group/WriteStream
root: INFO: 2018-05-09T21:30:22.115Z: JOB_MESSAGE_BASIC: Executing operation group/ReadStream+group/MergeBuckets+count+format+WriteStringsToPubSub/EncodeString+WriteStringsToPubSub/Write/NativeWrite
root: INFO: 2018-05-09T21:31:15.618Z: JOB_MESSAGE_DEBUG: Executing input step topology_init_attach_disk_input_step
root: INFO: 2018-05-09T21:31:36.008Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
root: WARNING: Timing out on waiting for job 2018-05-09_14_30_16-5406977071237026756 after 183 seconds
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
root: DEBUG: Start pulling messages from projects/apache-beam-testing/subscriptions/wc_subscription_output50e6b5e3-6ad6-441b-96dd-ac8a24456125
google_auth_httplib2: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
google_auth_httplib2: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
root: ERROR: Timeout after 400 sec. Received 0 messages from projects/apache-beam-testing/subscriptions/wc_subscription_output50e6b5e3-6ad6-441b-96dd-ac8a24456125.
--------------------- >> end captured logging << ---------------------

----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 4 tests in 928.424s

FAILED (errors=2, failures=1)
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_14_30_03-17418746177499869199?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_14_30_04-6608455032604507168?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_14_30_04-6445804603140249070?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_14_30_16-5406977071237026756?project=apache-beam-testing.

> Task :pythonPostCommit FAILED
:pythonPostCommit (Thread[Task worker for ':' Thread 8,5,main]) completed. Took 45 mins 7.303 secs.

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build.gradle>' line: 167

* What went wrong:
Execution failed for task ':pythonPostCommit'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

BUILD FAILED in 45m 14s
1 actionable task: 1 executed

Publishing build scan...
https://gradle.com/s/cbfmuzu5jdkxk

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
Not sending mail to unregistered user yifanzou@yifanzou-linuxworkstation.sea.corp.google.com
Not sending mail to unregistered user github@alasdairhodge.co.uk

Build failed in Jenkins: beam_PostCommit_Python_Verify #4910

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4910/display/redirect>

------------------------------------------
[...truncated 1.32 MB...]
                }, 
                {
                  "@type": "kind:interval_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "count.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s6"
        }, 
        "serialized_fn": "ref_AppliedPTransform_count_10", 
        "user_name": "count"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s8", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "format_result"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }, 
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", 
                      "component_encodings": []
                    }
                  ], 
                  "is_pair_like": true
                }, 
                {
                  "@type": "kind:interval_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "format.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s7"
        }, 
        "serialized_fn": "ref_AppliedPTransform_format_11", 
        "user_name": "format"
      }
    }, 
    {
      "kind": "ParallelDo", 
      "name": "s9", 
      "properties": {
        "display_data": [
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "<lambda>"
          }, 
          {
            "key": "fn", 
            "label": "Transform Function", 
            "namespace": "apache_beam.transforms.core.ParDo", 
            "shortValue": "CallableWrapperDoFn", 
            "type": "STRING", 
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ], 
        "non_parallel_inputs": {}, 
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value", 
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                }, 
                {
                  "@type": "kind:interval_window"
                }
              ], 
              "is_wrapper": true
            }, 
            "output_name": "out", 
            "user_name": "WriteStringsToPubSub/EncodeString.out"
          }
        ], 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s8"
        }, 
        "serialized_fn": "ref_AppliedPTransform_WriteStringsToPubSub/EncodeString_13", 
        "user_name": "WriteStringsToPubSub/EncodeString"
      }
    }, 
    {
      "kind": "ParallelWrite", 
      "name": "s10", 
      "properties": {
        "display_data": [], 
        "encoding": {
          "@type": "kind:windowed_value", 
          "component_encodings": [
            {
              "@type": "kind:bytes"
            }, 
            {
              "@type": "kind:global_window"
            }
          ], 
          "is_wrapper": true
        }, 
        "format": "pubsub", 
        "parallel_input": {
          "@type": "OutputReference", 
          "output_name": "out", 
          "step_name": "s9"
        }, 
        "pubsub_topic": "projects/apache-beam-testing/topics/wc_topic_output3ba13342-4ad9-48a3-bdab-017738c83309", 
        "user_name": "WriteStringsToPubSub/Write/NativeWrite"
      }
    }
  ], 
  "type": "JOB_TYPE_STREAMING"
}
root: INFO: Create job: <Job
 createTime: u'2018-05-09T19:26:22.468390Z'
 currentStateTime: u'1970-01-01T00:00:00Z'
 id: u'2018-05-09_12_26_21-6380409199884087082'
 location: u'us-central1'
 name: u'beamapp-jenkins-0509192620-223522'
 projectId: u'apache-beam-testing'
 stageStates: []
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
root: INFO: Created job with id: [2018-05-09_12_26_21-6380409199884087082]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_12_26_21-6380409199884087082?project=apache-beam-testing
root: INFO: Job 2018-05-09_12_26_21-6380409199884087082 is in state JOB_STATE_RUNNING
root: INFO: 2018-05-09T19:26:24.080Z: JOB_MESSAGE_DETAILED: Checking required Cloud APIs are enabled.
root: INFO: 2018-05-09T19:26:24.436Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2018-05-09T19:26:24.714Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-b.
root: INFO: 2018-05-09T19:26:25.025Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into optimizable parts.
root: INFO: 2018-05-09T19:26:25.032Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2018-05-09T19:26:25.038Z: JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into optimizable parts.
root: INFO: 2018-05-09T19:26:25.040Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write steps
root: INFO: 2018-05-09T19:26:25.045Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
root: INFO: 2018-05-09T19:26:25.056Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2018-05-09T19:26:25.059Z: JOB_MESSAGE_DETAILED: Fusing consumer ReadStringsFromPubSub/DecodeString into ReadStringsFromPubSub/ReadFromPubSub/Read
root: INFO: 2018-05-09T19:26:25.061Z: JOB_MESSAGE_DETAILED: Fusing consumer pair_with_one into split
root: INFO: 2018-05-09T19:26:25.063Z: JOB_MESSAGE_DETAILED: Fusing consumer count into group/MergeBuckets
root: INFO: 2018-05-09T19:26:25.065Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteStringsToPubSub/Write/NativeWrite into WriteStringsToPubSub/EncodeString
root: INFO: 2018-05-09T19:26:25.067Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteStringsToPubSub/EncodeString into format
root: INFO: 2018-05-09T19:26:25.069Z: JOB_MESSAGE_DETAILED: Fusing consumer group/MergeBuckets into group/ReadStream
root: INFO: 2018-05-09T19:26:25.072Z: JOB_MESSAGE_DETAILED: Fusing consumer format into count
root: INFO: 2018-05-09T19:26:25.074Z: JOB_MESSAGE_DETAILED: Fusing consumer group/WriteStream into WindowInto(WindowIntoFn)
root: INFO: 2018-05-09T19:26:25.077Z: JOB_MESSAGE_DETAILED: Fusing consumer split into ReadStringsFromPubSub/DecodeString
root: INFO: 2018-05-09T19:26:25.080Z: JOB_MESSAGE_DETAILED: Fusing consumer WindowInto(WindowIntoFn) into pair_with_one
root: INFO: 2018-05-09T19:26:25.084Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
root: INFO: 2018-05-09T19:26:25.103Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
root: INFO: 2018-05-09T19:26:25.153Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2018-05-09T19:26:25.327Z: JOB_MESSAGE_DEBUG: Executing wait step start2
root: INFO: 2018-05-09T19:26:25.371Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
root: INFO: 2018-05-09T19:26:25.376Z: JOB_MESSAGE_BASIC: Starting 1 workers...
root: INFO: 2018-05-09T19:26:27.796Z: JOB_MESSAGE_BASIC: Executing operation ReadStringsFromPubSub/ReadFromPubSub/Read+ReadStringsFromPubSub/DecodeString+split+pair_with_one+WindowInto(WindowIntoFn)+group/WriteStream
root: INFO: 2018-05-09T19:26:27.796Z: JOB_MESSAGE_BASIC: Executing operation group/ReadStream+group/MergeBuckets+count+format+WriteStringsToPubSub/EncodeString+WriteStringsToPubSub/Write/NativeWrite
root: INFO: 2018-05-09T19:26:45.592Z: JOB_MESSAGE_DEBUG: Executing input step topology_init_attach_disk_input_step
root: INFO: 2018-05-09T19:27:20.254Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
root: WARNING: Timing out on waiting for job 2018-05-09_12_26_21-6380409199884087082 after 181 seconds
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
root: DEBUG: Start pulling messages from projects/apache-beam-testing/subscriptions/wc_subscription_output3ba13342-4ad9-48a3-bdab-017738c83309
google_auth_httplib2: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
google_auth_httplib2: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/844138762903-compute@developer.gserviceaccount.com/token
root: ERROR: Timeout after 400 sec. Received 0 messages from projects/apache-beam-testing/subscriptions/wc_subscription_output3ba13342-4ad9-48a3-bdab-017738c83309.
--------------------- >> end captured logging << ---------------------
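The error above is the streaming wordcount verifier giving up: the job reached JOB_STATE_RUNNING, but no messages arrived on the wc_subscription_output... subscription within the 400 second limit. The check is essentially a poll-and-acknowledge loop against the output subscription; a minimal sketch is below, written against the current google-cloud-pubsub SubscriberClient (whose pull signature differs from the older client the 2018 test code used), so treat it as an illustration of the verification step rather than the test's code.

    # Sketch of the kind of "poll the output subscription" check that timed out
    # above. Uses the request-dict pull() of recent google-cloud-pubsub releases;
    # the 2018 test code used an older client with a different signature.
    import time
    from google.api_core import exceptions
    from google.cloud import pubsub_v1

    def wait_for_messages(project, subscription, expected_count, timeout_secs=400):
        subscriber = pubsub_v1.SubscriberClient()
        path = subscriber.subscription_path(project, subscription)
        received = []
        deadline = time.time() + timeout_secs
        while time.time() < deadline and len(received) < expected_count:
            try:
                response = subscriber.pull(
                    request={'subscription': path, 'max_messages': 50}, timeout=30)
            except exceptions.DeadlineExceeded:
                continue  # nothing arrived within this pull; keep polling
            for msg in response.received_messages:
                received.append(msg.message.data)
            if response.received_messages:
                subscriber.acknowledge(request={
                    'subscription': path,
                    'ack_ids': [m.ack_id for m in response.received_messages]})
        return received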

----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 4 tests in 942.953s

FAILED (errors=2, failures=1)
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_12_26_11-1337644688943182074?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_12_26_21-6380409199884087082?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_12_26_11-701884661196224627?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_12_26_11-9485065137344644260?project=apache-beam-testing.
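For reference, the fused step names in the failing streaming job above (ReadStringsFromPubSub/.../Read, split, pair_with_one, WindowInto(WindowIntoFn), group, count, format, WriteStringsToPubSub/Write/NativeWrite) correspond to a streaming word count of roughly the shape below. This is a minimal sketch against the 2.5.0.dev0-era Python API implied by those names (ReadStringsFromPubSub/WriteStringsToPubSub were later superseded by ReadFromPubSub/WriteToPubSub); the topic names are placeholders and the real test runs the packaged example module, not this code.

    # Minimal streaming word count matching the fused step names logged above.
    # Assumes the Beam 2.5-era Python API; <input-topic>/<output-topic> are
    # placeholders, and a real run also needs runner/project/staging options.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions
    from apache_beam.transforms import window

    options = PipelineOptions(['--streaming'])

    with beam.Pipeline(options=options) as p:
        (p
         | 'ReadStringsFromPubSub' >> beam.io.ReadStringsFromPubSub(
             topic='projects/<project>/topics/<input-topic>')
         | 'split' >> beam.FlatMap(lambda line: line.split())
         | 'pair_with_one' >> beam.Map(lambda word: (word, 1))
         | 'WindowInto' >> beam.WindowInto(window.FixedWindows(15))
         | 'group' >> beam.GroupByKey()
         | 'count' >> beam.Map(lambda kv: (kv[0], sum(kv[1])))
         | 'format' >> beam.Map(lambda kv: '%s: %d' % kv)
         | 'WriteStringsToPubSub' >> beam.io.WriteStringsToPubSub(
             'projects/<project>/topics/<output-topic>'))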

> Task :pythonPostCommit FAILED
:pythonPostCommit (Thread[Task worker for ':' Thread 4,5,main]) completed. Took 41 mins 34.545 secs.

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/build.gradle'> line: 167

* What went wrong:
Execution failed for task ':pythonPostCommit'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

BUILD FAILED in 41m 40s
1 actionable task: 1 executed

Publishing build scan...
https://gradle.com/s/xk64eiaulhuv6

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
Not sending mail to unregistered user yifanzou@yifanzou-linuxworkstation.sea.corp.google.com
Not sending mail to unregistered user github@alasdairhodge.co.uk

Build failed in Jenkins: beam_PostCommit_Python_Verify #4909

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4909/display/redirect?page=changes>

Changes:

[yifanzou] update machine numbers to 1..16 for inventory tests

[github] Revert "Enable githubCommitNotifier for post commits"

------------------------------------------
[...truncated 1.27 MB...]
namenode_1  | 18/05/09 17:57:27 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension     = 30000
namenode_1  | 18/05/09 17:57:27 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.window.num.buckets = 10
namenode_1  | 18/05/09 17:57:27 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.num.users = 10
namenode_1  | 18/05/09 17:57:27 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
namenode_1  | 18/05/09 17:57:27 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
namenode_1  | 18/05/09 17:57:27 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
namenode_1  | 18/05/09 17:57:27 INFO util.GSet: Computing capacity for map NameNodeRetryCache
namenode_1  | 18/05/09 17:57:27 INFO util.GSet: VM type       = 64-bit
namenode_1  | 18/05/09 17:57:27 INFO util.GSet: 0.029999999329447746% max memory 958.5 MB = 294.5 KB
namenode_1  | 18/05/09 17:57:27 INFO util.GSet: capacity      = 2^15 = 32768 entries
namenode_1  | 18/05/09 17:57:27 INFO common.Storage: Lock on /hadoop/dfs/name/in_use.lock acquired by nodename 143@namenode
namenode_1  | 18/05/09 17:57:27 INFO namenode.FileJournalManager: Recovering unfinalized segments in /hadoop/dfs/name/current
namenode_1  | 18/05/09 17:57:27 INFO namenode.FSImage: No edit log streams selected.
namenode_1  | 18/05/09 17:57:27 INFO namenode.FSImage: Planning to load image: FSImageFile(file=/hadoop/dfs/name/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSImageFormatPBINode: Loading 1 INodes.
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSImageFormatProtobuf: Loaded FSImage in 0 seconds.
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSImage: Loaded image for txid 0 from /hadoop/dfs/name/current/fsimage_0000000000000000000
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSNamesystem: Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSEditLog: Starting log segment at 1
namenode_1  | 18/05/09 17:57:28 INFO namenode.NameCache: initialized with 0 entries 0 lookups
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSNamesystem: Finished loading FSImage in 273 msecs
namenode_1  | 18/05/09 17:57:28 INFO namenode.NameNode: RPC server is binding to 0.0.0.0:8020
namenode_1  | 18/05/09 17:57:28 INFO ipc.CallQueueManager: Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 1000 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
namenode_1  | 18/05/09 17:57:28 INFO ipc.Server: Starting Socket Reader #1 for port 8020
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSNamesystem: Registered FSNamesystemState MBean
namenode_1  | 18/05/09 17:57:28 INFO namenode.LeaseManager: Number of blocks under construction: 0
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.BlockManager: initializing replication queues
namenode_1  | 18/05/09 17:57:28 INFO hdfs.StateChange: STATE* Leaving safe mode after 0 secs
namenode_1  | 18/05/09 17:57:28 INFO hdfs.StateChange: STATE* Network topology has 0 racks and 0 datanodes
namenode_1  | 18/05/09 17:57:28 INFO hdfs.StateChange: STATE* UnderReplicatedBlocks has 0 blocks
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.BlockManager: Total number of blocks            = 0
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.BlockManager: Number of invalid blocks          = 0
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.BlockManager: Number of under-replicated blocks = 0
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.BlockManager: Number of  over-replicated blocks = 0
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.BlockManager: Number of blocks being written    = 0
namenode_1  | 18/05/09 17:57:28 INFO hdfs.StateChange: STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 8 msec
namenode_1  | 18/05/09 17:57:28 INFO ipc.Server: IPC Server Responder: starting
namenode_1  | 18/05/09 17:57:28 INFO ipc.Server: IPC Server listener on 8020: starting
namenode_1  | 18/05/09 17:57:28 INFO namenode.NameNode: NameNode RPC up at: namenode/172.18.0.2:8020
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSNamesystem: Starting services required for active state
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSDirectory: Initializing quota with 4 thread(s)
namenode_1  | 18/05/09 17:57:28 INFO namenode.FSDirectory: Quota initialization completed in 4 milliseconds
namenode_1  | name space=1
namenode_1  | storage space=0
namenode_1  | storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.CacheReplicationMonitor: Starting CacheReplicationMonitor with interval 30000 milliseconds
datanode_1  | 18/05/09 17:57:28 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: Acknowledging ACTIVE Namenode during handshakeBlock pool <registering> (Datanode Uuid unassigned) service to namenode/172.18.0.2:8020
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Using 1 threads to upgrade data directories (dfs.datanode.parallel.volumes.load.threads.num=1, dataDirs=1)
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Lock on /hadoop/dfs/data/in_use.lock acquired by nodename 83@datanode
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Storage directory /hadoop/dfs/data is not formatted for namespace 240718783. Formatting...
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Generated new storageID DS-4534aacf-6b93-4505-ba5a-e06ea7d56b78 for directory /hadoop/dfs/data
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Analyzing storage directories for bpid BP-1888519677-172.18.0.2-1525888646324
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Locking is disabled for /hadoop/dfs/data/current/BP-1888519677-172.18.0.2-1525888646324
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Block pool storage directory /hadoop/dfs/data/current/BP-1888519677-172.18.0.2-1525888646324 is not formatted for BP-1888519677-172.18.0.2-1525888646324. Formatting ...
datanode_1  | 18/05/09 17:57:28 INFO common.Storage: Formatting block pool BP-1888519677-172.18.0.2-1525888646324 directory /hadoop/dfs/data/current/BP-1888519677-172.18.0.2-1525888646324/current
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: Setting up storage: nsid=240718783;bpid=BP-1888519677-172.18.0.2-1525888646324;lv=-57;nsInfo=lv=-63;cid=CID-6349bee1-3008-4353-9499-871de7cc896b;nsid=240718783;c=1525888646324;bpid=BP-1888519677-172.18.0.2-1525888646324;dnuuid=null
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: Generated and persisted new Datanode UUID 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Added new volume: DS-4534aacf-6b93-4505-ba5a-e06ea7d56b78
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Added volume - /hadoop/dfs/data/current, StorageType: DISK
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Registered FSDatasetState MBean
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Volume reference is released.
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Adding block pool BP-1888519677-172.18.0.2-1525888646324
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Scanning block pool BP-1888519677-172.18.0.2-1525888646324 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Time taken to scan block pool BP-1888519677-172.18.0.2-1525888646324 on /hadoop/dfs/data/current: 12ms
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Total time to scan all replicas for block pool BP-1888519677-172.18.0.2-1525888646324: 13ms
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Adding replicas to map for block pool BP-1888519677-172.18.0.2-1525888646324 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 17:57:28 INFO impl.BlockPoolSlice: Replica Cache file: /hadoop/dfs/data/current/BP-1888519677-172.18.0.2-1525888646324/current/replicas doesn't exist 
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Time to add replicas to map for block pool BP-1888519677-172.18.0.2-1525888646324 on volume /hadoop/dfs/data/current: 0ms
datanode_1  | 18/05/09 17:57:28 INFO impl.FsDatasetImpl: Total time to add all replicas to map: 1ms
datanode_1  | 18/05/09 17:57:28 INFO datanode.VolumeScanner: Now scanning bpid BP-1888519677-172.18.0.2-1525888646324 on volume /hadoop/dfs/data
datanode_1  | 18/05/09 17:57:28 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-4534aacf-6b93-4505-ba5a-e06ea7d56b78): finished scanning block pool BP-1888519677-172.18.0.2-1525888646324
datanode_1  | 18/05/09 17:57:28 INFO datanode.DirectoryScanner: Periodic Directory Tree Verification scan starting at 5/9/18 6:39 PM with interval of 21600000ms
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: Block pool BP-1888519677-172.18.0.2-1525888646324 (Datanode Uuid 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9) service to namenode/172.18.0.2:8020 beginning handshake with NN
datanode_1  | 18/05/09 17:57:28 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-4534aacf-6b93-4505-ba5a-e06ea7d56b78): no suitable block pools found to scan.  Waiting 1814399974 ms.
namenode_1  | 18/05/09 17:57:28 INFO hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(172.18.0.3:50010, datanodeUuid=4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-6349bee1-3008-4353-9499-871de7cc896b;nsid=240718783;c=1525888646324) storage 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9
namenode_1  | 18/05/09 17:57:28 INFO net.NetworkTopology: Adding a new node: /default-rack/172.18.0.3:50010
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.BlockReportLeaseManager: Registered DN 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9 (172.18.0.3:50010).
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: Block pool Block pool BP-1888519677-172.18.0.2-1525888646324 (Datanode Uuid 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9) service to namenode/172.18.0.2:8020 successfully registered with NN
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: For namenode namenode/172.18.0.2:8020 using BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
namenode_1  | 18/05/09 17:57:28 INFO blockmanagement.DatanodeDescriptor: Adding new storage ID DS-4534aacf-6b93-4505-ba5a-e06ea7d56b78 for DN 172.18.0.3:50010
namenode_1  | 18/05/09 17:57:28 INFO BlockStateChange: BLOCK* processReport 0x7ee94ace2da7a55: Processing first storage report for DS-4534aacf-6b93-4505-ba5a-e06ea7d56b78 from datanode 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9
namenode_1  | 18/05/09 17:57:28 INFO BlockStateChange: BLOCK* processReport 0x7ee94ace2da7a55: from storage DS-4534aacf-6b93-4505-ba5a-e06ea7d56b78 node DatanodeRegistration(172.18.0.3:50010, datanodeUuid=4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-6349bee1-3008-4353-9499-871de7cc896b;nsid=240718783;c=1525888646324), blocks: 0, hasStaleStorage: false, processing time: 2 msecs, invalidatedBlocks: 0
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: Successfully sent block report 0x7ee94ace2da7a55,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 4 msec to generate and 46 msecs for RPC and NN processing. Got back one command: FinalizeCommand/5.
datanode_1  | 18/05/09 17:57:28 INFO datanode.DataNode: Got finalize command for block pool BP-1888519677-172.18.0.2-1525888646324
test_1      | INFO	Instantiated configuration from '/app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg'.
test_1      | INFO	Instantiated <InsecureClient(url='http://namenode:50070')>.
test_1      | INFO	Uploading 'kinglear.txt' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | INFO	Listing '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Starting new HTTP connection (1): namenode
namenode_1  | May 09, 2018 5:58:13 PM com.sun.jersey.api.core.PackagesResourceConfig init
namenode_1  | INFO: Scanning for root resource and provider classes in the packages:
namenode_1  |   org.apache.hadoop.hdfs.server.namenode.web.resources
namenode_1  |   org.apache.hadoop.hdfs.web.resources
namenode_1  | May 09, 2018 5:58:13 PM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Root resource classes found:
namenode_1  |   class org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods
namenode_1  | May 09, 2018 5:58:13 PM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Provider classes found:
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.UserProvider
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.ExceptionHandler
namenode_1  | May 09, 2018 5:58:13 PM com.sun.jersey.server.impl.application.WebApplicationImpl _initiate
namenode_1  | INFO: Initiating Jersey application, version 'Jersey: 1.9 09/02/2011 11:17 AM'
namenode_1  | May 09, 2018 5:58:14 PM com.sun.jersey.spi.inject.Errors processErrorMessages
namenode_1  | WARNING: The following warnings have been detected with resource and/or provider classes:
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.putRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PutOpParam,org.apache.hadoop.hdfs.web.resources.DestinationParam,org.apache.hadoop.hdfs.web.resources.OwnerParam,org.apache.hadoop.hdfs.web.resources.GroupParam,org.apache.hadoop.hdfs.web.resources.PermissionParam,org.apache.hadoop.hdfs.web.resources.OverwriteParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ReplicationParam,org.apache.hadoop.hdfs.web.resources.BlockSizeParam,org.apache.hadoop.hdfs.web.resources.ModificationTimeParam,org.apache.hadoop.hdfs.web.resources.AccessTimeParam,org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam,org.apache.hadoop.hdfs.web.resources.CreateParentParam,org.apache.hadoop.hdfs.web.resources.TokenArgumentParam,org.apache.hadoop.hdfs.web.resources.AclPermissionParam,org.apache.hadoop.hdfs.web.resources.XAttrNameParam,org.apache.hadoop.hdfs.web.resources.XAttrValueParam,org.apache.hadoop.hdfs.web.resources.XAttrSetFlagParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam,org.apache.hadoop.hdfs.web.resources.OldSnapshotNameParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.CreateFlagParam,org.apache.hadoop.hdfs.web.resources.StoragePolicyParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.postRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PostOpParam,org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.NewLengthParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.deleteRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.DeleteOpParam,org.apache.hadoop.hdfs.web.resources.RecursiveParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.getRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.GetOpParam,org.apache.hadoop.hdfs.web.resources.OffsetParam,org.apache.hadoop.hdfs.web.resources.LengthParam,org.apache.hadoop.hdfs.web.resources.RenewerParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,java.util.List,org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.FsActionParam,org.apache.hadoop.hdfs.web.resources.TokenKindParam,org.apache.hadoop.hdfs.web.resources.TokenServiceParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
test_1      | DEBUG	http://namenode:50070 "GET /webhdfs/v1/?user.name=root&op=LISTSTATUS HTTP/1.1" 200 None
test_1      | DEBUG	Uploading 1 files using 1 thread(s).
test_1      | DEBUG	Uploading 'kinglear.txt' to '/kinglear.txt'.
test_1      | INFO	Writing to '/kinglear.txt'.
test_1      | DEBUG	Resolved path '/kinglear.txt' to '/kinglear.txt'.
test_1      | DEBUG	http://namenode:50070 "PUT /webhdfs/v1/kinglear.txt?user.name=root&overwrite=False&op=CREATE HTTP/1.1" 307 0
test_1      | DEBUG	Starting new HTTP connection (1): datanode
datanode_1  | 18/05/09 17:58:14 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/kinglear.txt?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
namenode_1  | 18/05/09 17:58:14 INFO hdfs.StateChange: BLOCK* allocate blk_1073741825_1001, replicas=172.18.0.3:50010 for /kinglear.txt
datanode_1  | 18/05/09 17:58:14 INFO datanode.DataNode: Receiving BP-1888519677-172.18.0.2-1525888646324:blk_1073741825_1001 src: /172.18.0.3:53446 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 17:58:14 INFO DataNode.clienttrace: src: /172.18.0.3:53446, dest: /172.18.0.3:50010, bytes: 157283, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_-1227625114_67, offset: 0, srvID: 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9, blockid: BP-1888519677-172.18.0.2-1525888646324:blk_1073741825_1001, duration: 9373962
datanode_1  | 18/05/09 17:58:14 INFO datanode.DataNode: PacketResponder: BP-1888519677-172.18.0.2-1525888646324:blk_1073741825_1001, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 17:58:14 INFO namenode.FSNamesystem: BLOCK* blk_1073741825_1001 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /kinglear.txt
namenode_1  | 18/05/09 17:58:14 INFO namenode.EditLogFileOutputStream: Nothing to flush
namenode_1  | 18/05/09 17:58:15 INFO hdfs.StateChange: DIR* completeFile: /kinglear.txt is closed by DFSClient_NONMAPREDUCE_-1227625114_67
test_1      | DEBUG	Upload of 'kinglear.txt' to '/kinglear.txt' complete.
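The upload logged above is driven by the hdfscli configuration referenced at the start of the test output and an InsecureClient pointed at the namenode's WebHDFS endpoint. A minimal equivalent with the hdfs Python package looks roughly like this (host, user and paths are taken from the log lines; this is not the integration test's own script):

    # Rough equivalent of the upload above, using the `hdfs` package's
    # InsecureClient against the namenode's WebHDFS port. Host, user and paths
    # mirror the log; not the test's actual code.
    from hdfs import InsecureClient

    client = InsecureClient('http://namenode:50070', user='root')
    client.upload('/kinglear.txt', 'kinglear.txt', overwrite=False)
    print(client.list('/'))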
test_1      | INFO:root:Missing pipeline option (runner). Executing pipeline using the default runner: DirectRunner.
test_1      | INFO:root:==================== <function annotate_downstream_side_inputs at 0x7fa0a9b4fc08> ====================
test_1      | INFO:root:==================== <function lift_combiners at 0x7fa0a9b4ff50> ====================
test_1      | INFO:root:==================== <function expand_gbk at 0x7fa0a9b4f410> ====================
test_1      | INFO:root:==================== <function sink_flattens at 0x7fa0a9b4f7d0> ====================
test_1      | INFO:root:==================== <function greedily_fuse at 0x7fa0a9b4f6e0> ====================
test_1      | INFO:root:==================== <function sort_stages at 0x7fa0a9b4f5f0> ====================
test_1      | INFO:root:Running (((ref_AppliedPTransform_write/Write/WriteImpl/DoOnce/Read_16)+(ref_AppliedPTransform_write/Write/WriteImpl/InitializeWrite_17))+(ref_PCollection_PCollection_9/Write))+(ref_PCollection_PCollection_10/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out']>
test_1      | INFO:root:start <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7fa0a9b21050>, start_position=None, stop_position=None)>
test_1      | INFO:root:finish <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7fa0a9b21050>, start_position=None, stop_position=None), receivers=[ConsumerSet[write/Write/WriteImpl/DoOnce/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=2]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/InitializeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:Running (ref_AppliedPTransform_read/Read_3)+((ref_AppliedPTransform_split_4)+((ref_AppliedPTransform_pair_with_one_5)+(group/Write)))
test_1      | INFO:root:start <DataOutputOperation group/Write >
test_1      | INFO:root:start <DoOperation pair_with_one output_tags=['out']>
test_1      | INFO:root:start <DoOperation split output_tags=['out']>
test_1      | INFO:root:start <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7fa0a9b21550>, start_position=None, stop_position=None)>
datanode_1  | 18/05/09 17:58:16 INFO datanode.webhdfs: 172.18.0.4 GET /webhdfs/v1/kinglear.txt?op=OPEN&user.name=root&namenoderpcaddress=namenode:8020&length=157284&offset=0 200
test_1      | INFO:root:finish <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7fa0a9b21550>, start_position=None, stop_position=None), receivers=[ConsumerSet[read/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation split output_tags=['out'], receivers=[ConsumerSet[split.out0, coder=WindowedValueCoder[StrUtf8Coder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation pair_with_one output_tags=['out'], receivers=[ConsumerSet[pair_with_one.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], VarIntCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation group/Write >
test_1      | INFO:root:Running (((group/Read)+((ref_AppliedPTransform_count_10)+(ref_AppliedPTransform_format_11)))+(ref_AppliedPTransform_write/Write/WriteImpl/WriteBundles_18))+((ref_AppliedPTransform_write/Write/WriteImpl/Pair_19)+((ref_AppliedPTransform_write/Write/WriteImpl/WindowInto(WindowIntoFn)_20)+(write/Write/WriteImpl/GroupByKey/Write)))
test_1      | INFO:root:start <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Pair output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out']>
test_1      | INFO:root:start <DoOperation format output_tags=['out']>
test_1      | INFO:root:start <DoOperation count output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | WARNING:root:Mime types are not supported. Got non-default mime_type: text/plain
datanode_1  | 18/05/09 17:58:17 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/beam-temp-py-wordcount-integration-8c83502853b211e8b7920242ac120004/30859902-3eb0-4fde-a3b0-5f4c3021bc5e.py-wordcount-integration?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
test_1      | INFO:root:finish <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation count output_tags=['out'], receivers=[ConsumerSet[count.out0, coder=WindowedValueCoder[TupleCoder[StrUtf8Coder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation format output_tags=['out'], receivers=[ConsumerSet[format.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WriteBundles.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
namenode_1  | 18/05/09 17:58:18 INFO hdfs.StateChange: BLOCK* allocate blk_1073741826_1002, replicas=172.18.0.3:50010 for /beam-temp-py-wordcount-integration-8c83502853b211e8b7920242ac120004/30859902-3eb0-4fde-a3b0-5f4c3021bc5e.py-wordcount-integration
datanode_1  | 18/05/09 17:58:18 INFO datanode.DataNode: Receiving BP-1888519677-172.18.0.2-1525888646324:blk_1073741826_1002 src: /172.18.0.3:53474 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 17:58:18 INFO DataNode.clienttrace: src: /172.18.0.3:53474, dest: /172.18.0.3:50010, bytes: 48944, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_-847469805_69, offset: 0, srvID: 4e96d8e9-2d2c-4811-8369-9ba9ce6f6fe9, blockid: BP-1888519677-172.18.0.2-1525888646324:blk_1073741826_1002, duration: 3160686
datanode_1  | 18/05/09 17:58:18 INFO datanode.DataNode: PacketResponder: BP-1888519677-172.18.0.2-1525888646324:blk_1073741826_1002, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 17:58:18 INFO hdfs.StateChange: DIR* completeFile: /beam-temp-py-wordcount-integration-8c83502853b211e8b7920242ac120004/30859902-3eb0-4fde-a3b0-5f4c3021bc5e.py-wordcount-integration is closed by DFSClient_NONMAPREDUCE_-847469805_69
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Pair output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Pair.out0, coder=WindowedValueCoder[TupleCoder[FastPrimitivesCoder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WindowInto(WindowIntoFn).out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], LengthPrefixCoder[FastPrimitivesCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:Running (write/Write/WriteImpl/GroupByKey/Read)+((ref_AppliedPTransform_write/Write/WriteImpl/Extract_25)+(ref_PCollection_PCollection_17/Write))
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Extract output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Extract output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Extract.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:Running ((ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/PreFinalize_26))+(ref_PCollection_PCollection_18/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/PreFinalize.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:Running (ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/FinalizeWrite_27)
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:Starting finalize_write threads with num_shards: 1 (skipped: 0), batches: 1, num_threads: 1
test_1      | INFO:root:Renamed 1 shards in 0.13 seconds.
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/FinalizeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=0]]>
test_1      | INFO:root:number of empty lines: 1663
test_1      | INFO:root:average word length: 4
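The run above is the batch word count executed on the Fn API based local runner against the dockerized HDFS: read/Read pulls /kinglear.txt over WebHDFS, group/count/format do the counting, and write/Write lands the shards under the beam-temp-py-wordcount-integration path before the final rename. A comparable pipeline is sketched below; the --hdfs_* options are assumed from the SDK's HadoopFileSystemOptions and the hdfs:// paths mirror the WebHDFS requests in the log, while the real test invokes the packaged wordcount example rather than this code.

    # Sketch of a batch word count over the dockerized HDFS shown above.
    # The --hdfs_* options assume apache_beam's HadoopFileSystemOptions; the
    # hdfs:// paths mirror the log. Illustrative, not the test's invocation.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions([
        '--runner=DirectRunner',
        '--hdfs_host=namenode',
        '--hdfs_port=50070',
        '--hdfs_user=root',
    ])

    with beam.Pipeline(options=options) as p:
        (p
         | 'read' >> beam.io.ReadFromText('hdfs://kinglear.txt')
         | 'split' >> beam.FlatMap(lambda line: line.split())
         | 'pair_with_one' >> beam.Map(lambda word: (word, 1))
         | 'group' >> beam.GroupByKey()
         | 'count' >> beam.Map(lambda kv: (kv[0], sum(kv[1])))
         | 'format' >> beam.Map(lambda kv: '%s: %d' % kv)
         | 'write' >> beam.io.WriteToText('hdfs://py-wordcount-integration'))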
hdfs_it-jenkins-beam_postcommit_python_verify-4909_test_1 exited with code 0
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4909_datanode_1 ... 
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4909_namenode_1 ... 
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4909_datanode_1 ... done
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4909_namenode_1 ... done
Aborting on container exit...

real	1m15.215s
user	0m0.573s
sys	0m0.120s
:pythonPostCommit (Thread[Daemon worker,5,main]) completed. Took 35 mins 9.656 secs.

BUILD SUCCESSFUL in 35m 14s
1 actionable task: 1 executed

Publishing build scan...
https://gradle.com/s/wzvs5eyjez6rk

Build step 'Invoke Gradle script' changed build result to SUCCESS
[Set GitHub commit status (universal)] SUCCESS on repos [GHRepository@4aa0c306[description=Apache Beam,homepage=,name=beam,fork=false,size=58734,milestones={},language=Java,commits={},source=<null>,parent=<null>,responseHeaderFields={null=[HTTP/1.1 200 OK], Access-Control-Allow-Origin=[*], Access-Control-Expose-Headers=[ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval], Cache-Control=[private, max-age=60, s-maxage=60], Content-Encoding=[gzip], Content-Security-Policy=[default-src 'none'], Content-Type=[application/json; charset=utf-8], Date=[Wed, 09 May 2018 17:58:40 GMT], ETag=[W/"e1b0f25c4d02a532b8653b75bac3a38f"], Last-Modified=[Wed, 09 May 2018 16:51:10 GMT], OkHttp-Received-Millis=[1525888720965], OkHttp-Response-Source=[NETWORK 200], OkHttp-Selected-Protocol=[http/1.1], OkHttp-Sent-Millis=[1525888720806], Referrer-Policy=[origin-when-cross-origin, strict-origin-when-cross-origin], Server=[GitHub.com], Status=[200 OK], Strict-Transport-Security=[max-age=31536000; includeSubdomains; preload], Transfer-Encoding=[chunked], Vary=[Accept, Authorization, Cookie, X-GitHub-OTP], X-Accepted-OAuth-Scopes=[repo], X-Content-Type-Options=[nosniff], X-Frame-Options=[deny], X-GitHub-Media-Type=[github.v3; format=json], X-GitHub-Request-Id=[C29C:4F40:DC755E:1E471FA:5AF336AD], X-OAuth-Scopes=[admin:repo_hook, repo, repo:status], X-RateLimit-Limit=[5000], X-RateLimit-Remaining=[4923], X-RateLimit-Reset=[1525889764], X-Runtime-rack=[0.078460], X-XSS-Protection=[1; mode=block]},url=https://api.github.com/repos/apache/beam,id=50904245]] (sha:2708329) with context:beam_PostCommit_Python_Verify
Setting commit status on GitHub for https://github.com/apache/beam/commit/27083293394e48c7d437f562dd504f04d55a05af
ERROR: Build step failed with exception
java.io.FileNotFoundException: https://api.github.com/repos/apache/beam/statuses/27083293394e48c7d437f562dd504f04d55a05af
	at com.squareup.okhttp.internal.huc.HttpURLConnectionImpl.getInputStream(HttpURLConnectionImpl.java:243)
	at com.squareup.okhttp.internal.huc.DelegatingHttpsURLConnection.getInputStream(DelegatingHttpsURLConnection.java:210)
	at com.squareup.okhttp.internal.huc.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:25)
	at org.kohsuke.github.Requester.parse(Requester.java:612)
	at org.kohsuke.github.Requester.parse(Requester.java:594)
	at org.kohsuke.github.Requester._to(Requester.java:272)
Caused: org.kohsuke.github.GHFileNotFoundException: {"message":"Not Found","documentation_url":"https://developer.github.com/v3/repos/statuses/#create-a-status"}
	at org.kohsuke.github.Requester.handleApiError(Requester.java:686)
	at org.kohsuke.github.Requester._to(Requester.java:293)
	at org.kohsuke.github.Requester.to(Requester.java:234)
	at org.kohsuke.github.GHRepository.createCommitStatus(GHRepository.java:1075)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:160)
Caused: org.jenkinsci.plugins.github.common.CombineErrorHandler$ErrorHandlingException
	at org.jenkinsci.plugins.github.common.CombineErrorHandler.handle(CombineErrorHandler.java:74)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:164)
	at com.cloudbees.jenkins.GitHubCommitNotifier.perform(GitHubCommitNotifier.java:151)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1749)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Build step 'Set build status on GitHub commit [deprecated]' marked build as failure
Not sending mail to unregistered user yifanzou@yifanzou-linuxworkstation.sea.corp.google.com
Not sending mail to unregistered user github@alasdairhodge.co.uk


Build failed in Jenkins: beam_PostCommit_Python_Verify #4908

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4908/display/redirect>

------------------------------------------
[...truncated 1.01 MB...]
copying apache_beam/runners/direct/clock.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/consumer_tracking_pipeline_visitor.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/consumer_tracking_pipeline_visitor_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_metrics.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_metrics_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_runner.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/direct_runner_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/evaluation_context.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/executor.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/helper_transforms.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/sdf_direct_runner.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/sdf_direct_runner_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/transform_evaluator.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/util.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/direct/watermark_manager.py -> apache-beam-2.5.0.dev0/apache_beam/runners/direct
copying apache_beam/runners/experimental/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/runners/experimental
copying apache_beam/runners/experimental/python_rpc_direct/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/runners/experimental/python_rpc_direct
copying apache_beam/runners/experimental/python_rpc_direct/python_rpc_direct_runner.py -> apache-beam-2.5.0.dev0/apache_beam/runners/experimental/python_rpc_direct
copying apache_beam/runners/experimental/python_rpc_direct/server.py -> apache-beam-2.5.0.dev0/apache_beam/runners/experimental/python_rpc_direct
copying apache_beam/runners/job/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/runners/job
copying apache_beam/runners/job/manager.py -> apache-beam-2.5.0.dev0/apache_beam/runners/job
copying apache_beam/runners/job/utils.py -> apache-beam-2.5.0.dev0/apache_beam/runners/job
copying apache_beam/runners/portability/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/fn_api_runner.py -> apache-beam-2.5.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/fn_api_runner_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/universal_local_runner.py -> apache-beam-2.5.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/universal_local_runner_main.py -> apache-beam-2.5.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/portability/universal_local_runner_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/portability
copying apache_beam/runners/test/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/runners/test
copying apache_beam/runners/worker/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/bundle_processor.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/data_plane.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/data_plane_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/log_handler.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/log_handler_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/logger.pxd -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/logger.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/logger_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/opcounters.pxd -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/opcounters.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/opcounters_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/operation_specs.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/operations.pxd -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/operations.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker_main.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker_main_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sdk_worker_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sideinputs.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/sideinputs_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/statesampler.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/statesampler_fast.pyx -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/statesampler_slow.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/statesampler_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/worker_id_interceptor.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/runners/worker/worker_id_interceptor_test.py -> apache-beam-2.5.0.dev0/apache_beam/runners/worker
copying apache_beam/testing/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/pipeline_verifiers.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/pipeline_verifiers_test.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/test_pipeline.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/test_pipeline_test.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/test_stream.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/test_stream_test.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/test_utils.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/test_utils_test.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/util.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/util_test.py -> apache-beam-2.5.0.dev0/apache_beam/testing
copying apache_beam/testing/data/standard_coders.yaml -> apache-beam-2.5.0.dev0/apache_beam/testing/data
copying apache_beam/testing/data/trigger_transcripts.yaml -> apache-beam-2.5.0.dev0/apache_beam/testing/data
copying apache_beam/tools/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/tools
copying apache_beam/tools/distribution_counter_microbenchmark.py -> apache-beam-2.5.0.dev0/apache_beam/tools
copying apache_beam/tools/map_fn_microbenchmark.py -> apache-beam-2.5.0.dev0/apache_beam/tools
copying apache_beam/tools/sideinput_microbenchmark.py -> apache-beam-2.5.0.dev0/apache_beam/tools
copying apache_beam/tools/utils.py -> apache-beam-2.5.0.dev0/apache_beam/tools
copying apache_beam/transforms/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/combiners.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/combiners_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/core.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/create_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/cy_combiners.pxd -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/cy_combiners.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/cy_dataflow_distribution_counter.pxd -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/cy_dataflow_distribution_counter.pyx -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/dataflow_distribution_counter_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/display.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/display_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/ptransform.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/ptransform_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/py_dataflow_distribution_counter.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/sideinputs.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/sideinputs_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/timeutil.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/trigger.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/trigger_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/util.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/util_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/window.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/window_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/transforms/write_ptransform_test.py -> apache-beam-2.5.0.dev0/apache_beam/transforms
copying apache_beam/typehints/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/decorators.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/native_type_compatibility.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/native_type_compatibility_test.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/opcodes.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/trivial_inference.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/trivial_inference_test.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typecheck.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typed_pipeline_test.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typehints.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/typehints/typehints_test.py -> apache-beam-2.5.0.dev0/apache_beam/typehints
copying apache_beam/utils/__init__.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/annotations.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/annotations_test.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/counters.pxd -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/counters.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/counters_test.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/plugin.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/processes.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/processes_test.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/profiler.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/proto_utils.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/retry.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/retry_test.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/timestamp.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/timestamp_test.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/urns.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/windowed_value.pxd -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/windowed_value.py -> apache-beam-2.5.0.dev0/apache_beam/utils
copying apache_beam/utils/windowed_value_test.py -> apache-beam-2.5.0.dev0/apache_beam/utils
Writing apache-beam-2.5.0.dev0/setup.cfg
creating dist
Creating tar archive
removing 'apache-beam-2.5.0.dev0' (and everything under it)

SDK_LOCATION=$(find dist/apache-beam-*.tar.gz)
find dist/apache-beam-*.tar.gz

# Run integration tests on the Google Cloud Dataflow service
# and validate that jobs finish successfully.
echo ">>> RUNNING TEST DATAFLOW RUNNER it tests"
>>> RUNNING TEST DATAFLOW RUNNER it tests
python setup.py nosetests \
  --attr IT \
  --nocapture \
  --processes=4 \
  --process-timeout=1800 \
  --test-pipeline-options=" \
    --runner=TestDataflowRunner \
    --project=$PROJECT \
    --staging_location=$GCS_LOCATION/staging-it \
    --temp_location=$GCS_LOCATION/temp-it \
    --output=$GCS_LOCATION/py-it-cloud/output \
    --sdk_location=$SDK_LOCATION \
    --num_workers=1 \
    --sleep_secs=20"
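The nosetests command above selects tests carrying the IT attribute and passes the quoted --test-pipeline-options string through to them; on the test side this is normally consumed with apache_beam.testing.test_pipeline.TestPipeline. A bare-bones sketch of that pattern (illustrative, not one of the four tests in this run):

    # Shape of an integration test driven by the command above: @attr('IT')
    # matches --attr IT, and TestPipeline(is_integration_test=True) parses the
    # --test-pipeline-options string. Illustrative only.
    import unittest
    from nose.plugins.attrib import attr

    import apache_beam as beam
    from apache_beam.testing.test_pipeline import TestPipeline

    class ExampleIT(unittest.TestCase):

      @attr('IT')
      def test_counts_on_the_configured_runner(self):
        pipeline = TestPipeline(is_integration_test=True)
        (pipeline
         | beam.Create(['to', 'be', 'or', 'not', 'to', 'be'])
         | beam.combiners.Count.PerElement())
        pipeline.run().wait_until_finish()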
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/build/gradleenv/local/lib/python2.7/site-packages/setuptools/dist.py>:388: UserWarning: Normalizing '2.5.0.dev' to '2.5.0.dev0'
  normalized_version,
running nosetests
running egg_info
writing requirements to apache_beam.egg-info/requires.txt
writing apache_beam.egg-info/PKG-INFO
writing top-level names to apache_beam.egg-info/top_level.txt
writing dependency_links to apache_beam.egg-info/dependency_links.txt
writing entry points to apache_beam.egg-info/entry_points.txt
reading manifest file 'apache_beam.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
warning: no files found matching 'README.md'
warning: no files found matching 'NOTICE'
warning: no files found matching 'LICENSE'
writing manifest file 'apache_beam.egg-info/SOURCES.txt'
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/gcsio.py>:176: DeprecationWarning: object() takes no parameters
  super(GcsIO, cls).__new__(cls, storage_client))
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/io/gcp/gcsio.py>:176: DeprecationWarning: object() takes no parameters
  super(GcsIO, cls).__new__(cls, storage_client))
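
[Editor's note, not build output] The repeated gcsio.py warning above comes from forwarding an extra argument to object.__new__, which Python 2.7 only deprecates ("object() takes no parameters") and Python 3 rejects outright. A minimal sketch of the pattern that triggers the warning and the conventional fix, using illustrative class names rather than Beam's actual GcsIO code:

    # Illustrative only -- not the Beam GcsIO implementation.
    class WarnsOnNew(object):
      def __new__(cls, storage_client=None):
        # Forwarding storage_client on to object.__new__ triggers
        # "DeprecationWarning: object() takes no parameters" on Python 2.7
        # (and a TypeError on Python 3).
        return super(WarnsOnNew, cls).__new__(cls, storage_client)

    class CleanNew(object):
      def __new__(cls, storage_client=None):
        # object.__new__ should receive only the class; keep any extra
        # constructor state for __init__ or attach it here explicitly.
        instance = super(CleanNew, cls).__new__(cls)
        instance.storage_client = storage_client
        return instance
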
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py>:52: DeprecationWarning: options is deprecated since First stable release.. References to <pipeline>.options will not be supported
  print('Found: %s.' % self.build_console_url(pipeline.options))
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py>:52: DeprecationWarning: options is deprecated since First stable release.. References to <pipeline>.options will not be supported
  print('Found: %s.' % self.build_console_url(pipeline.options))
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py>:52: DeprecationWarning: options is deprecated since First stable release.. References to <pipeline>.options will not be supported
  print('Found: %s.' % self.build_console_url(pipeline.options))
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py>:782: DeprecationWarning: options is deprecated since First stable release.. References to <pipeline>.options will not be supported
  transform_node.inputs[0].pipeline.options.view_as(StandardOptions))
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/apache_beam/runners/dataflow/test_dataflow_runner.py>:52: DeprecationWarning: options is deprecated since First stable release.. References to <pipeline>.options will not be supported
  print('Found: %s.' % self.build_console_url(pipeline.options))
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_streaming_wordcount_it (apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_bigquery_tornadoes_it (apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT) ... ok

----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python_Verify/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 4 tests in 449.712s

OK
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_10_15_52-10117564333159246724?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_10_15_45-810705382252696130?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_10_15_43-10808117976966396274?project=apache-beam-testing.
Found: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-05-09_10_15_44-12725380867657108921?project=apache-beam-testing.
:beam-sdks-python:validatesRunnerTests (Thread[Task worker for ':' Thread 13,5,main]) completed. Took 7 mins 31.196 secs.
:beam-sdks-python:postCommit (Thread[Task worker for ':' Thread 13,5,main]) started.

> Task :beam-sdks-python:postCommit
Skipping task ':beam-sdks-python:postCommit' as it has no actions.
:beam-sdks-python:postCommit (Thread[Task worker for ':' Thread 13,5,main]) completed. Took 0.0 secs.
:pythonPostCommit (Thread[Task worker for ':' Thread 13,5,main]) started.

> Task :pythonPostCommit
Skipping task ':pythonPostCommit' as it has no actions.
:pythonPostCommit (Thread[Task worker for ':' Thread 13,5,main]) completed. Took 0.0 secs.

BUILD SUCCESSFUL in 38m 30s
11 actionable tasks: 11 executed

Publishing build scan...
https://gradle.com/s/f26tntgrhxtca

Build step 'Invoke Gradle script' changed build result to SUCCESS
[Set GitHub commit status (universal)] SUCCESS on repos [GHRepository@37ade967[description=Apache Beam,homepage=,name=beam,fork=false,size=58730,milestones={},language=Java,commits={},source=<null>,parent=<null>,responseHeaderFields={null=[HTTP/1.1 200 OK], Access-Control-Allow-Origin=[*], Access-Control-Expose-Headers=[ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval], Cache-Control=[private, max-age=60, s-maxage=60], Content-Encoding=[gzip], Content-Security-Policy=[default-src 'none'], Content-Type=[application/json; charset=utf-8], Date=[Wed, 09 May 2018 17:23:13 GMT], ETag=[W/"a77c79df4f0adfc709a03447bf0ea09e"], Last-Modified=[Wed, 09 May 2018 16:51:10 GMT], OkHttp-Received-Millis=[1525886593424], OkHttp-Response-Source=[NETWORK 200], OkHttp-Selected-Protocol=[http/1.1], OkHttp-Sent-Millis=[1525886593145], Referrer-Policy=[origin-when-cross-origin, strict-origin-when-cross-origin], Server=[GitHub.com], Status=[200 OK], Strict-Transport-Security=[max-age=31536000; includeSubdomains; preload], Transfer-Encoding=[chunked], Vary=[Accept, Authorization, Cookie, X-GitHub-OTP], X-Accepted-OAuth-Scopes=[repo], X-Content-Type-Options=[nosniff], X-Frame-Options=[deny], X-GitHub-Media-Type=[github.v3; format=json], X-GitHub-Request-Id=[AA06:4F41:11D1779:263BBDD:5AF32E81], X-OAuth-Scopes=[admin:repo_hook, repo, repo:status], X-RateLimit-Limit=[5000], X-RateLimit-Remaining=[4979], X-RateLimit-Reset=[1525889764], X-Runtime-rack=[0.073406], X-XSS-Protection=[1; mode=block]},url=https://api.github.com/repos/apache/beam,id=50904245]] (sha:f805d4d) with context:beam_PostCommit_Python_Verify
Setting commit status on GitHub for https://github.com/apache/beam/commit/f805d4d1088112377f929226c0e824d219c19ebf
ERROR: Build step failed with exception
java.io.FileNotFoundException: https://api.github.com/repos/apache/beam/statuses/f805d4d1088112377f929226c0e824d219c19ebf
	at com.squareup.okhttp.internal.huc.HttpURLConnectionImpl.getInputStream(HttpURLConnectionImpl.java:243)
	at com.squareup.okhttp.internal.huc.DelegatingHttpsURLConnection.getInputStream(DelegatingHttpsURLConnection.java:210)
	at com.squareup.okhttp.internal.huc.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:25)
	at org.kohsuke.github.Requester.parse(Requester.java:612)
	at org.kohsuke.github.Requester.parse(Requester.java:594)
	at org.kohsuke.github.Requester._to(Requester.java:272)
Caused: org.kohsuke.github.GHFileNotFoundException: {"message":"Not Found","documentation_url":"https://developer.github.com/v3/repos/statuses/#create-a-status"}
	at org.kohsuke.github.Requester.handleApiError(Requester.java:686)
	at org.kohsuke.github.Requester._to(Requester.java:293)
	at org.kohsuke.github.Requester.to(Requester.java:234)
	at org.kohsuke.github.GHRepository.createCommitStatus(GHRepository.java:1075)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:160)
Caused: org.jenkinsci.plugins.github.common.CombineErrorHandler$ErrorHandlingException
	at org.jenkinsci.plugins.github.common.CombineErrorHandler.handle(CombineErrorHandler.java:74)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:164)
	at com.cloudbees.jenkins.GitHubCommitNotifier.perform(GitHubCommitNotifier.java:151)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1749)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Build step 'Set build status on GitHub commit [deprecated]' marked build as failure
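
[Editor's note, not build output] The stack trace above is the Jenkins GitHub plugin calling GitHub's "create a commit status" endpoint (POST /repos/apache/beam/statuses/<sha>) and getting a 404 back; GitHub commonly returns 404 from this endpoint when the token lacks the required scope or cannot see the commit, not only when the URL is wrong. An equivalent request, sketched with the requests library and placeholder credentials, looks like:

    # Sketch of the REST call behind GHRepository.createCommitStatus();
    # the token and target_url are placeholders, the SHA is from the log above.
    import requests

    token = '<personal-access-token with repo:status scope>'  # placeholder
    sha = 'f805d4d1088112377f929226c0e824d219c19ebf'
    url = 'https://api.github.com/repos/apache/beam/statuses/%s' % sha

    response = requests.post(
        url,
        json={
            'state': 'success',
            'target_url': '<build URL>',                 # placeholder
            'description': 'Jenkins post-commit build',  # illustrative
            'context': 'beam_PostCommit_Python_Verify',
        },
        headers={'Authorization': 'token %s' % token})

    # A 404 response here is what surfaces as GHFileNotFoundException above.
    print(response.status_code, response.text)
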

Build failed in Jenkins: beam_PostCommit_Python_Verify #4907

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4907/display/redirect?page=changes>

Changes:

[Pablo] StateSampler knows the execution thread it tracks.

------------------------------------------
[...truncated 1.27 MB...]
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSNamesystem: dfs.namenode.safemode.threshold-pct = 0.9990000128746033
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSNamesystem: dfs.namenode.safemode.min.datanodes = 0
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension     = 30000
namenode_1  | 18/05/09 16:43:13 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.window.num.buckets = 10
namenode_1  | 18/05/09 16:43:13 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.num.users = 10
namenode_1  | 18/05/09 16:43:13 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
namenode_1  | 18/05/09 16:43:13 INFO util.GSet: Computing capacity for map NameNodeRetryCache
namenode_1  | 18/05/09 16:43:13 INFO util.GSet: VM type       = 64-bit
namenode_1  | 18/05/09 16:43:13 INFO util.GSet: 0.029999999329447746% max memory 958.5 MB = 294.5 KB
namenode_1  | 18/05/09 16:43:13 INFO util.GSet: capacity      = 2^15 = 32768 entries
namenode_1  | 18/05/09 16:43:13 INFO common.Storage: Lock on /hadoop/dfs/name/in_use.lock acquired by nodename 143@namenode
namenode_1  | 18/05/09 16:43:13 INFO namenode.FileJournalManager: Recovering unfinalized segments in /hadoop/dfs/name/current
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSImage: No edit log streams selected.
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSImage: Planning to load image: FSImageFile(file=/hadoop/dfs/name/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSImageFormatPBINode: Loading 1 INodes.
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSImageFormatProtobuf: Loaded FSImage in 0 seconds.
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSImage: Loaded image for txid 0 from /hadoop/dfs/name/current/fsimage_0000000000000000000
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSNamesystem: Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
namenode_1  | 18/05/09 16:43:13 INFO namenode.FSEditLog: Starting log segment at 1
namenode_1  | 18/05/09 16:43:14 INFO namenode.NameCache: initialized with 0 entries 0 lookups
namenode_1  | 18/05/09 16:43:14 INFO namenode.FSNamesystem: Finished loading FSImage in 314 msecs
namenode_1  | 18/05/09 16:43:14 INFO namenode.NameNode: RPC server is binding to 0.0.0.0:8020
namenode_1  | 18/05/09 16:43:14 INFO ipc.CallQueueManager: Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 1000 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
namenode_1  | 18/05/09 16:43:14 INFO ipc.Server: Starting Socket Reader #1 for port 8020
datanode_1  | 18/05/09 16:43:14 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
namenode_1  | 18/05/09 16:43:14 INFO namenode.FSNamesystem: Registered FSNamesystemState MBean
namenode_1  | 18/05/09 16:43:14 INFO namenode.LeaseManager: Number of blocks under construction: 0
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.BlockManager: initializing replication queues
namenode_1  | 18/05/09 16:43:14 INFO hdfs.StateChange: STATE* Leaving safe mode after 0 secs
namenode_1  | 18/05/09 16:43:14 INFO hdfs.StateChange: STATE* Network topology has 0 racks and 0 datanodes
namenode_1  | 18/05/09 16:43:14 INFO hdfs.StateChange: STATE* UnderReplicatedBlocks has 0 blocks
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.BlockManager: Total number of blocks            = 0
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.BlockManager: Number of invalid blocks          = 0
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.BlockManager: Number of under-replicated blocks = 0
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.BlockManager: Number of  over-replicated blocks = 0
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.BlockManager: Number of blocks being written    = 0
namenode_1  | 18/05/09 16:43:14 INFO hdfs.StateChange: STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 13 msec
namenode_1  | 18/05/09 16:43:14 INFO ipc.Server: IPC Server Responder: starting
namenode_1  | 18/05/09 16:43:14 INFO ipc.Server: IPC Server listener on 8020: starting
namenode_1  | 18/05/09 16:43:14 INFO namenode.NameNode: NameNode RPC up at: namenode/172.18.0.2:8020
namenode_1  | 18/05/09 16:43:14 INFO namenode.FSNamesystem: Starting services required for active state
namenode_1  | 18/05/09 16:43:14 INFO namenode.FSDirectory: Initializing quota with 4 thread(s)
namenode_1  | 18/05/09 16:43:14 INFO namenode.FSDirectory: Quota initialization completed in 4 milliseconds
namenode_1  | name space=1
namenode_1  | storage space=0
namenode_1  | storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.CacheReplicationMonitor: Starting CacheReplicationMonitor with interval 30000 milliseconds
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: Acknowledging ACTIVE Namenode during handshakeBlock pool <registering> (Datanode Uuid unassigned) service to namenode/172.18.0.2:8020
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Using 1 threads to upgrade data directories (dfs.datanode.parallel.volumes.load.threads.num=1, dataDirs=1)
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Lock on /hadoop/dfs/data/in_use.lock acquired by nodename 83@datanode
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Storage directory /hadoop/dfs/data is not formatted for namespace 1508204439. Formatting...
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Generated new storageID DS-869441b7-b2fc-4c34-a0f4-6cf6085044dd for directory /hadoop/dfs/data
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Analyzing storage directories for bpid BP-1005096461-172.18.0.2-1525884191852
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Locking is disabled for /hadoop/dfs/data/current/BP-1005096461-172.18.0.2-1525884191852
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Block pool storage directory /hadoop/dfs/data/current/BP-1005096461-172.18.0.2-1525884191852 is not formatted for BP-1005096461-172.18.0.2-1525884191852. Formatting ...
datanode_1  | 18/05/09 16:43:14 INFO common.Storage: Formatting block pool BP-1005096461-172.18.0.2-1525884191852 directory /hadoop/dfs/data/current/BP-1005096461-172.18.0.2-1525884191852/current
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: Setting up storage: nsid=1508204439;bpid=BP-1005096461-172.18.0.2-1525884191852;lv=-57;nsInfo=lv=-63;cid=CID-f8bf973b-564b-4ef4-9af8-8252453996f4;nsid=1508204439;c=1525884191852;bpid=BP-1005096461-172.18.0.2-1525884191852;dnuuid=null
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: Generated and persisted new Datanode UUID 8707ff19-88bb-42ee-8b57-c46c2d20e77c
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Added new volume: DS-869441b7-b2fc-4c34-a0f4-6cf6085044dd
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Added volume - /hadoop/dfs/data/current, StorageType: DISK
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Registered FSDatasetState MBean
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Volume reference is released.
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Adding block pool BP-1005096461-172.18.0.2-1525884191852
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Scanning block pool BP-1005096461-172.18.0.2-1525884191852 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Time taken to scan block pool BP-1005096461-172.18.0.2-1525884191852 on /hadoop/dfs/data/current: 22ms
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Total time to scan all replicas for block pool BP-1005096461-172.18.0.2-1525884191852: 23ms
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Adding replicas to map for block pool BP-1005096461-172.18.0.2-1525884191852 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 16:43:14 INFO impl.BlockPoolSlice: Replica Cache file: /hadoop/dfs/data/current/BP-1005096461-172.18.0.2-1525884191852/current/replicas doesn't exist 
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Time to add replicas to map for block pool BP-1005096461-172.18.0.2-1525884191852 on volume /hadoop/dfs/data/current: 0ms
datanode_1  | 18/05/09 16:43:14 INFO impl.FsDatasetImpl: Total time to add all replicas to map: 1ms
datanode_1  | 18/05/09 16:43:14 INFO datanode.VolumeScanner: Now scanning bpid BP-1005096461-172.18.0.2-1525884191852 on volume /hadoop/dfs/data
datanode_1  | 18/05/09 16:43:14 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-869441b7-b2fc-4c34-a0f4-6cf6085044dd): finished scanning block pool BP-1005096461-172.18.0.2-1525884191852
datanode_1  | 18/05/09 16:43:14 INFO datanode.DirectoryScanner: Periodic Directory Tree Verification scan starting at 5/9/18 9:19 PM with interval of 21600000ms
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: Block pool BP-1005096461-172.18.0.2-1525884191852 (Datanode Uuid 8707ff19-88bb-42ee-8b57-c46c2d20e77c) service to namenode/172.18.0.2:8020 beginning handshake with NN
datanode_1  | 18/05/09 16:43:14 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-869441b7-b2fc-4c34-a0f4-6cf6085044dd): no suitable block pools found to scan.  Waiting 1814399970 ms.
namenode_1  | 18/05/09 16:43:14 INFO hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(172.18.0.3:50010, datanodeUuid=8707ff19-88bb-42ee-8b57-c46c2d20e77c, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-f8bf973b-564b-4ef4-9af8-8252453996f4;nsid=1508204439;c=1525884191852) storage 8707ff19-88bb-42ee-8b57-c46c2d20e77c
namenode_1  | 18/05/09 16:43:14 INFO net.NetworkTopology: Adding a new node: /default-rack/172.18.0.3:50010
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.BlockReportLeaseManager: Registered DN 8707ff19-88bb-42ee-8b57-c46c2d20e77c (172.18.0.3:50010).
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: Block pool Block pool BP-1005096461-172.18.0.2-1525884191852 (Datanode Uuid 8707ff19-88bb-42ee-8b57-c46c2d20e77c) service to namenode/172.18.0.2:8020 successfully registered with NN
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: For namenode namenode/172.18.0.2:8020 using BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
namenode_1  | 18/05/09 16:43:14 INFO blockmanagement.DatanodeDescriptor: Adding new storage ID DS-869441b7-b2fc-4c34-a0f4-6cf6085044dd for DN 172.18.0.3:50010
namenode_1  | 18/05/09 16:43:14 INFO BlockStateChange: BLOCK* processReport 0x8dae1208a2f4c12c: Processing first storage report for DS-869441b7-b2fc-4c34-a0f4-6cf6085044dd from datanode 8707ff19-88bb-42ee-8b57-c46c2d20e77c
namenode_1  | 18/05/09 16:43:14 INFO BlockStateChange: BLOCK* processReport 0x8dae1208a2f4c12c: from storage DS-869441b7-b2fc-4c34-a0f4-6cf6085044dd node DatanodeRegistration(172.18.0.3:50010, datanodeUuid=8707ff19-88bb-42ee-8b57-c46c2d20e77c, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-f8bf973b-564b-4ef4-9af8-8252453996f4;nsid=1508204439;c=1525884191852), blocks: 0, hasStaleStorage: false, processing time: 2 msecs, invalidatedBlocks: 0
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: Successfully sent block report 0x8dae1208a2f4c12c,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 4 msec to generate and 54 msecs for RPC and NN processing. Got back one command: FinalizeCommand/5.
datanode_1  | 18/05/09 16:43:14 INFO datanode.DataNode: Got finalize command for block pool BP-1005096461-172.18.0.2-1525884191852
test_1      | INFO	Instantiated configuration from '/app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg'.
test_1      | INFO	Instantiated <InsecureClient(url='http://namenode:50070')>.
test_1      | INFO	Uploading 'kinglear.txt' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | INFO	Listing '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Starting new HTTP connection (1): namenode
namenode_1  | May 09, 2018 4:43:59 PM com.sun.jersey.api.core.PackagesResourceConfig init
namenode_1  | INFO: Scanning for root resource and provider classes in the packages:
namenode_1  |   org.apache.hadoop.hdfs.server.namenode.web.resources
namenode_1  |   org.apache.hadoop.hdfs.web.resources
namenode_1  | May 09, 2018 4:43:59 PM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Root resource classes found:
namenode_1  |   class org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods
namenode_1  | May 09, 2018 4:43:59 PM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Provider classes found:
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.UserProvider
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.ExceptionHandler
namenode_1  | May 09, 2018 4:43:59 PM com.sun.jersey.server.impl.application.WebApplicationImpl _initiate
namenode_1  | INFO: Initiating Jersey application, version 'Jersey: 1.9 09/02/2011 11:17 AM'
namenode_1  | May 09, 2018 4:44:00 PM com.sun.jersey.spi.inject.Errors processErrorMessages
namenode_1  | WARNING: The following warnings have been detected with resource and/or provider classes:
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.postRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PostOpParam,org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.NewLengthParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.putRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PutOpParam,org.apache.hadoop.hdfs.web.resources.DestinationParam,org.apache.hadoop.hdfs.web.resources.OwnerParam,org.apache.hadoop.hdfs.web.resources.GroupParam,org.apache.hadoop.hdfs.web.resources.PermissionParam,org.apache.hadoop.hdfs.web.resources.OverwriteParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ReplicationParam,org.apache.hadoop.hdfs.web.resources.BlockSizeParam,org.apache.hadoop.hdfs.web.resources.ModificationTimeParam,org.apache.hadoop.hdfs.web.resources.AccessTimeParam,org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam,org.apache.hadoop.hdfs.web.resources.CreateParentParam,org.apache.hadoop.hdfs.web.resources.TokenArgumentParam,org.apache.hadoop.hdfs.web.resources.AclPermissionParam,org.apache.hadoop.hdfs.web.resources.XAttrNameParam,org.apache.hadoop.hdfs.web.resources.XAttrValueParam,org.apache.hadoop.hdfs.web.resources.XAttrSetFlagParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam,org.apache.hadoop.hdfs.web.resources.OldSnapshotNameParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.CreateFlagParam,org.apache.hadoop.hdfs.web.resources.StoragePolicyParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.deleteRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.DeleteOpParam,org.apache.hadoop.hdfs.web.resources.RecursiveParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.getRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.GetOpParam,org.apache.hadoop.hdfs.web.resources.OffsetParam,org.apache.hadoop.hdfs.web.resources.LengthParam,org.apache.hadoop.hdfs.web.resources.RenewerParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,java.util.List,org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.FsActionParam,org.apache.hadoop.hdfs.web.resources.TokenKindParam,org.apache.hadoop.hdfs.web.resources.TokenServiceParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
test_1      | DEBUG	http://namenode:50070 "GET /webhdfs/v1/?user.name=root&op=LISTSTATUS HTTP/1.1" 200 None
test_1      | DEBUG	Uploading 1 files using 1 thread(s).
test_1      | DEBUG	Uploading 'kinglear.txt' to '/kinglear.txt'.
test_1      | INFO	Writing to '/kinglear.txt'.
test_1      | DEBUG	Resolved path '/kinglear.txt' to '/kinglear.txt'.
test_1      | DEBUG	http://namenode:50070 "PUT /webhdfs/v1/kinglear.txt?user.name=root&overwrite=False&op=CREATE HTTP/1.1" 307 0
test_1      | DEBUG	Starting new HTTP connection (1): datanode
datanode_1  | 18/05/09 16:44:00 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/kinglear.txt?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
namenode_1  | 18/05/09 16:44:00 INFO hdfs.StateChange: BLOCK* allocate blk_1073741825_1001, replicas=172.18.0.3:50010 for /kinglear.txt
datanode_1  | 18/05/09 16:44:00 INFO datanode.DataNode: Receiving BP-1005096461-172.18.0.2-1525884191852:blk_1073741825_1001 src: /172.18.0.3:39638 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 16:44:00 INFO DataNode.clienttrace: src: /172.18.0.3:39638, dest: /172.18.0.3:50010, bytes: 157283, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_598309046_67, offset: 0, srvID: 8707ff19-88bb-42ee-8b57-c46c2d20e77c, blockid: BP-1005096461-172.18.0.2-1525884191852:blk_1073741825_1001, duration: 12300698
datanode_1  | 18/05/09 16:44:00 INFO datanode.DataNode: PacketResponder: BP-1005096461-172.18.0.2-1525884191852:blk_1073741825_1001, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 16:44:00 INFO namenode.FSNamesystem: BLOCK* blk_1073741825_1001 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /kinglear.txt
namenode_1  | 18/05/09 16:44:00 INFO namenode.EditLogFileOutputStream: Nothing to flush
namenode_1  | 18/05/09 16:44:01 INFO hdfs.StateChange: DIR* completeFile: /kinglear.txt is closed by DFSClient_NONMAPREDUCE_598309046_67
test_1      | DEBUG	Upload of 'kinglear.txt' to '/kinglear.txt' complete.
test_1      | INFO:root:Missing pipeline option (runner). Executing pipeline using the default runner: DirectRunner.
test_1      | INFO:root:==================== <function annotate_downstream_side_inputs at 0x7fbbd53fa7d0> ====================
test_1      | INFO:root:==================== <function lift_combiners at 0x7fbbd53fae60> ====================
test_1      | INFO:root:==================== <function expand_gbk at 0x7fbbd53fa6e0> ====================
test_1      | INFO:root:==================== <function sink_flattens at 0x7fbbd53fa140> ====================
test_1      | INFO:root:==================== <function greedily_fuse at 0x7fbbd53fa8c0> ====================
test_1      | INFO:root:==================== <function sort_stages at 0x7fbbd53fa410> ====================
test_1      | INFO:root:Running (((ref_AppliedPTransform_write/Write/WriteImpl/DoOnce/Read_16)+(ref_AppliedPTransform_write/Write/WriteImpl/InitializeWrite_17))+(ref_PCollection_PCollection_9/Write))+(ref_PCollection_PCollection_10/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out']>
test_1      | INFO:root:start <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7fbbd53c1d10>, start_position=None, stop_position=None)>
test_1      | INFO:root:finish <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7fbbd53c1d10>, start_position=None, stop_position=None), receivers=[ConsumerSet[write/Write/WriteImpl/DoOnce/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=2]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/InitializeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:Running (ref_AppliedPTransform_read/Read_3)+((ref_AppliedPTransform_split_4)+((ref_AppliedPTransform_pair_with_one_5)+(group/Write)))
test_1      | INFO:root:start <DataOutputOperation group/Write >
test_1      | INFO:root:start <DoOperation pair_with_one output_tags=['out']>
test_1      | INFO:root:start <DoOperation split output_tags=['out']>
test_1      | INFO:root:start <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7fbbd53d0910>, start_position=None, stop_position=None)>
datanode_1  | 18/05/09 16:44:02 INFO datanode.webhdfs: 172.18.0.4 GET /webhdfs/v1/kinglear.txt?op=OPEN&user.name=root&namenoderpcaddress=namenode:8020&length=157284&offset=0 200
test_1      | INFO:root:finish <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7fbbd53d0910>, start_position=None, stop_position=None), receivers=[ConsumerSet[read/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation split output_tags=['out'], receivers=[ConsumerSet[split.out0, coder=WindowedValueCoder[StrUtf8Coder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation pair_with_one output_tags=['out'], receivers=[ConsumerSet[pair_with_one.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], VarIntCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation group/Write >
test_1      | INFO:root:Running (((group/Read)+((ref_AppliedPTransform_count_10)+(ref_AppliedPTransform_format_11)))+(ref_AppliedPTransform_write/Write/WriteImpl/WriteBundles_18))+((ref_AppliedPTransform_write/Write/WriteImpl/Pair_19)+((ref_AppliedPTransform_write/Write/WriteImpl/WindowInto(WindowIntoFn)_20)+(write/Write/WriteImpl/GroupByKey/Write)))
test_1      | INFO:root:start <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Pair output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out']>
test_1      | INFO:root:start <DoOperation format output_tags=['out']>
test_1      | INFO:root:start <DoOperation count output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | WARNING:root:Mime types are not supported. Got non-default mime_type: text/plain
datanode_1  | 18/05/09 16:44:04 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/beam-temp-py-wordcount-integration-2df498c853a811e8b61f0242ac120004/808937df-ac77-4dce-9f83-e53b875600c4.py-wordcount-integration?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
test_1      | INFO:root:finish <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation count output_tags=['out'], receivers=[ConsumerSet[count.out0, coder=WindowedValueCoder[TupleCoder[StrUtf8Coder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation format output_tags=['out'], receivers=[ConsumerSet[format.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WriteBundles.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
namenode_1  | 18/05/09 16:44:04 INFO hdfs.StateChange: BLOCK* allocate blk_1073741826_1002, replicas=172.18.0.3:50010 for /beam-temp-py-wordcount-integration-2df498c853a811e8b61f0242ac120004/808937df-ac77-4dce-9f83-e53b875600c4.py-wordcount-integration
datanode_1  | 18/05/09 16:44:04 INFO datanode.DataNode: Receiving BP-1005096461-172.18.0.2-1525884191852:blk_1073741826_1002 src: /172.18.0.3:39656 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 16:44:04 INFO DataNode.clienttrace: src: /172.18.0.3:39656, dest: /172.18.0.3:50010, bytes: 48944, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_1467892703_69, offset: 0, srvID: 8707ff19-88bb-42ee-8b57-c46c2d20e77c, blockid: BP-1005096461-172.18.0.2-1525884191852:blk_1073741826_1002, duration: 4248853
datanode_1  | 18/05/09 16:44:04 INFO datanode.DataNode: PacketResponder: BP-1005096461-172.18.0.2-1525884191852:blk_1073741826_1002, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 16:44:04 INFO hdfs.StateChange: DIR* completeFile: /beam-temp-py-wordcount-integration-2df498c853a811e8b61f0242ac120004/808937df-ac77-4dce-9f83-e53b875600c4.py-wordcount-integration is closed by DFSClient_NONMAPREDUCE_1467892703_69
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Pair output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Pair.out0, coder=WindowedValueCoder[TupleCoder[FastPrimitivesCoder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WindowInto(WindowIntoFn).out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], LengthPrefixCoder[FastPrimitivesCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:Running (write/Write/WriteImpl/GroupByKey/Read)+((ref_AppliedPTransform_write/Write/WriteImpl/Extract_25)+(ref_PCollection_PCollection_17/Write))
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Extract output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Extract output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Extract.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:Running ((ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/PreFinalize_26))+(ref_PCollection_PCollection_18/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/PreFinalize.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:Running (ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/FinalizeWrite_27)
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:Starting finalize_write threads with num_shards: 1 (skipped: 0), batches: 1, num_threads: 1
test_1      | INFO:root:Renamed 1 shards in 0.14 seconds.
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/FinalizeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=0]]>
test_1      | INFO:root:number of empty lines: 1663
test_1      | INFO:root:average word length: 4
hdfs_it-jenkins-beam_postcommit_python_verify-4907_test_1 exited with code 0
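
[Editor's note, not build output] The test_1 log above is the Python wordcount example reading kinglear.txt from HDFS (via WebHDFS on the namenode container) and writing sharded output back. A minimal sketch of that kind of pipeline, with illustrative paths and assuming Beam's hdfs_host/hdfs_port/hdfs_user pipeline options and standard text I/O (not the exact integration-test script), is:

    # Wordcount sketch mirroring the stage names in the log; paths and
    # HDFS options are illustrative.
    import re

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions([
        '--hdfs_host=namenode',  # docker-compose service name from the log
        '--hdfs_port=50070',
        '--hdfs_user=root',
    ])

    # No --runner given, so this falls back to DirectRunner, as in the log.
    with beam.Pipeline(options=options) as p:
      _ = (
          p
          | 'read' >> beam.io.ReadFromText('hdfs://kinglear.txt')
          | 'split' >> beam.FlatMap(lambda line: re.findall(r"[A-Za-z\']+", line))
          | 'pair_with_one' >> beam.Map(lambda word: (word, 1))
          | 'group' >> beam.GroupByKey()
          | 'count' >> beam.Map(lambda kv: (kv[0], sum(kv[1])))
          | 'format' >> beam.Map(lambda kv: '%s: %d' % kv)
          | 'write' >> beam.io.WriteToText('hdfs://py-wordcount-integration'))
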
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4907_datanode_1 ... done
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4907_namenode_1 ... done
Aborting on container exit...

real	1m16.887s
user	0m0.858s
sys	0m0.163s
:pythonPostCommit (Thread[Task worker for ':' Thread 3,5,main]) completed. Took 42 mins 52.027 secs.

BUILD SUCCESSFUL in 42m 57s
1 actionable task: 1 executed

Publishing build scan...
https://gradle.com/s/c3oicdbgj5dju

Build step 'Invoke Gradle script' changed build result to SUCCESS
[Set GitHub commit status (universal)] SUCCESS on repos [GHRepository@5ac4a4bb[description=Apache Beam,homepage=,name=beam,fork=false,size=58730,milestones={},language=Java,commits={},source=<null>,parent=<null>,responseHeaderFields={null=[HTTP/1.1 200 OK], Access-Control-Allow-Origin=[*], Access-Control-Expose-Headers=[ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval], Cache-Control=[private, max-age=60, s-maxage=60], Content-Encoding=[gzip], Content-Security-Policy=[default-src 'none'], Content-Type=[application/json; charset=utf-8], Date=[Wed, 09 May 2018 16:44:28 GMT], ETag=[W/"5c9d6c3689c17a1bcebc550b5488695c"], Last-Modified=[Wed, 09 May 2018 16:41:32 GMT], OkHttp-Received-Millis=[1525884268217], OkHttp-Response-Source=[NETWORK 200], OkHttp-Selected-Protocol=[http/1.1], OkHttp-Sent-Millis=[1525884268061], Referrer-Policy=[origin-when-cross-origin, strict-origin-when-cross-origin], Server=[GitHub.com], Status=[200 OK], Strict-Transport-Security=[max-age=31536000; includeSubdomains; preload], Transfer-Encoding=[chunked], Vary=[Accept, Authorization, Cookie, X-GitHub-OTP], X-Accepted-OAuth-Scopes=[repo], X-Content-Type-Options=[nosniff], X-Frame-Options=[deny], X-GitHub-Media-Type=[github.v3; format=json], X-GitHub-Request-Id=[CDFC:4F41:1181FF2:258E98F:5AF32557], X-OAuth-Scopes=[admin:repo_hook, repo, repo:status], X-RateLimit-Limit=[5000], X-RateLimit-Remaining=[4967], X-RateLimit-Reset=[1525886159], X-Runtime-rack=[0.074065], X-XSS-Protection=[1; mode=block]},url=https://api.github.com/repos/apache/beam,id=50904245]] (sha:1e40357) with context:beam_PostCommit_Python_Verify
Setting commit status on GitHub for https://github.com/apache/beam/commit/1e40357edf3e9940bb898bcd7b6c5516d685f908
ERROR: Build step failed with exception
java.io.FileNotFoundException: https://api.github.com/repos/apache/beam/statuses/1e40357edf3e9940bb898bcd7b6c5516d685f908
	at com.squareup.okhttp.internal.huc.HttpURLConnectionImpl.getInputStream(HttpURLConnectionImpl.java:243)
	at com.squareup.okhttp.internal.huc.DelegatingHttpsURLConnection.getInputStream(DelegatingHttpsURLConnection.java:210)
	at com.squareup.okhttp.internal.huc.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:25)
	at org.kohsuke.github.Requester.parse(Requester.java:612)
	at org.kohsuke.github.Requester.parse(Requester.java:594)
	at org.kohsuke.github.Requester._to(Requester.java:272)
Caused: org.kohsuke.github.GHFileNotFoundException: {"message":"Not Found","documentation_url":"https://developer.github.com/v3/repos/statuses/#create-a-status"}
	at org.kohsuke.github.Requester.handleApiError(Requester.java:686)
	at org.kohsuke.github.Requester._to(Requester.java:293)
	at org.kohsuke.github.Requester.to(Requester.java:234)
	at org.kohsuke.github.GHRepository.createCommitStatus(GHRepository.java:1075)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:160)
Caused: org.jenkinsci.plugins.github.common.CombineErrorHandler$ErrorHandlingException
	at org.jenkinsci.plugins.github.common.CombineErrorHandler.handle(CombineErrorHandler.java:74)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:164)
	at com.cloudbees.jenkins.GitHubCommitNotifier.perform(GitHubCommitNotifier.java:151)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1749)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Build step 'Set build status on GitHub commit [deprecated]' marked build as failure


Build failed in Jenkins: beam_PostCommit_Python_Verify #4906

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4906/display/redirect>

------------------------------------------
[...truncated 1.27 MB...]
namenode_1  | 18/05/09 16:00:02 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension     = 30000
namenode_1  | 18/05/09 16:00:02 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.window.num.buckets = 10
namenode_1  | 18/05/09 16:00:02 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.num.users = 10
namenode_1  | 18/05/09 16:00:02 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
namenode_1  | 18/05/09 16:00:02 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
namenode_1  | 18/05/09 16:00:02 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
namenode_1  | 18/05/09 16:00:02 INFO util.GSet: Computing capacity for map NameNodeRetryCache
namenode_1  | 18/05/09 16:00:02 INFO util.GSet: VM type       = 64-bit
namenode_1  | 18/05/09 16:00:02 INFO util.GSet: 0.029999999329447746% max memory 958.5 MB = 294.5 KB
namenode_1  | 18/05/09 16:00:02 INFO util.GSet: capacity      = 2^15 = 32768 entries
namenode_1  | 18/05/09 16:00:02 INFO common.Storage: Lock on /hadoop/dfs/name/in_use.lock acquired by nodename 143@namenode
namenode_1  | 18/05/09 16:00:02 INFO namenode.FileJournalManager: Recovering unfinalized segments in /hadoop/dfs/name/current
namenode_1  | 18/05/09 16:00:02 INFO namenode.FSImage: No edit log streams selected.
namenode_1  | 18/05/09 16:00:02 INFO namenode.FSImage: Planning to load image: FSImageFile(file=/hadoop/dfs/name/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSImageFormatPBINode: Loading 1 INodes.
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSImageFormatProtobuf: Loaded FSImage in 0 seconds.
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSImage: Loaded image for txid 0 from /hadoop/dfs/name/current/fsimage_0000000000000000000
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSNamesystem: Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSEditLog: Starting log segment at 1
namenode_1  | 18/05/09 16:00:03 INFO namenode.NameCache: initialized with 0 entries 0 lookups
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSNamesystem: Finished loading FSImage in 366 msecs
datanode_1  | 18/05/09 16:00:03 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
namenode_1  | 18/05/09 16:00:03 INFO namenode.NameNode: RPC server is binding to 0.0.0.0:8020
namenode_1  | 18/05/09 16:00:03 INFO ipc.CallQueueManager: Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 1000 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
namenode_1  | 18/05/09 16:00:03 INFO ipc.Server: Starting Socket Reader #1 for port 8020
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSNamesystem: Registered FSNamesystemState MBean
namenode_1  | 18/05/09 16:00:03 INFO namenode.LeaseManager: Number of blocks under construction: 0
namenode_1  | 18/05/09 16:00:03 INFO blockmanagement.BlockManager: initializing replication queues
namenode_1  | 18/05/09 16:00:03 INFO hdfs.StateChange: STATE* Leaving safe mode after 0 secs
namenode_1  | 18/05/09 16:00:03 INFO hdfs.StateChange: STATE* Network topology has 0 racks and 0 datanodes
namenode_1  | 18/05/09 16:00:03 INFO hdfs.StateChange: STATE* UnderReplicatedBlocks has 0 blocks
namenode_1  | 18/05/09 16:00:03 INFO blockmanagement.BlockManager: Total number of blocks            = 0
namenode_1  | 18/05/09 16:00:03 INFO blockmanagement.BlockManager: Number of invalid blocks          = 0
namenode_1  | 18/05/09 16:00:03 INFO blockmanagement.BlockManager: Number of under-replicated blocks = 0
namenode_1  | 18/05/09 16:00:03 INFO blockmanagement.BlockManager: Number of  over-replicated blocks = 0
namenode_1  | 18/05/09 16:00:03 INFO blockmanagement.BlockManager: Number of blocks being written    = 0
namenode_1  | 18/05/09 16:00:03 INFO hdfs.StateChange: STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 12 msec
namenode_1  | 18/05/09 16:00:03 INFO ipc.Server: IPC Server Responder: starting
namenode_1  | 18/05/09 16:00:03 INFO ipc.Server: IPC Server listener on 8020: starting
namenode_1  | 18/05/09 16:00:03 INFO namenode.NameNode: NameNode RPC up at: namenode/172.18.0.2:8020
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSNamesystem: Starting services required for active state
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSDirectory: Initializing quota with 4 thread(s)
namenode_1  | 18/05/09 16:00:03 INFO namenode.FSDirectory: Quota initialization completed in 4 milliseconds
namenode_1  | name space=1
namenode_1  | storage space=0
namenode_1  | storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0
namenode_1  | 18/05/09 16:00:03 INFO blockmanagement.CacheReplicationMonitor: Starting CacheReplicationMonitor with interval 30000 milliseconds
datanode_1  | 18/05/09 16:00:04 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: Acknowledging ACTIVE Namenode during handshakeBlock pool <registering> (Datanode Uuid unassigned) service to namenode/172.18.0.2:8020
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Using 1 threads to upgrade data directories (dfs.datanode.parallel.volumes.load.threads.num=1, dataDirs=1)
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Lock on /hadoop/dfs/data/in_use.lock acquired by nodename 83@datanode
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Storage directory /hadoop/dfs/data is not formatted for namespace 1379155504. Formatting...
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Generated new storageID DS-c4c76f38-0eae-4ec2-b61a-f4836063d612 for directory /hadoop/dfs/data
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Analyzing storage directories for bpid BP-2094248745-172.18.0.2-1525881600893
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Locking is disabled for /hadoop/dfs/data/current/BP-2094248745-172.18.0.2-1525881600893
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Block pool storage directory /hadoop/dfs/data/current/BP-2094248745-172.18.0.2-1525881600893 is not formatted for BP-2094248745-172.18.0.2-1525881600893. Formatting ...
datanode_1  | 18/05/09 16:00:04 INFO common.Storage: Formatting block pool BP-2094248745-172.18.0.2-1525881600893 directory /hadoop/dfs/data/current/BP-2094248745-172.18.0.2-1525881600893/current
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: Setting up storage: nsid=1379155504;bpid=BP-2094248745-172.18.0.2-1525881600893;lv=-57;nsInfo=lv=-63;cid=CID-86c2db2e-fc0e-49c3-9352-7ee4ce0ccb15;nsid=1379155504;c=1525881600893;bpid=BP-2094248745-172.18.0.2-1525881600893;dnuuid=null
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: Generated and persisted new Datanode UUID f19545d6-b924-4985-87b2-93c71002a0ff
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Added new volume: DS-c4c76f38-0eae-4ec2-b61a-f4836063d612
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Added volume - /hadoop/dfs/data/current, StorageType: DISK
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Registered FSDatasetState MBean
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Volume reference is released.
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Adding block pool BP-2094248745-172.18.0.2-1525881600893
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Scanning block pool BP-2094248745-172.18.0.2-1525881600893 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Time taken to scan block pool BP-2094248745-172.18.0.2-1525881600893 on /hadoop/dfs/data/current: 17ms
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Total time to scan all replicas for block pool BP-2094248745-172.18.0.2-1525881600893: 19ms
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Adding replicas to map for block pool BP-2094248745-172.18.0.2-1525881600893 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 16:00:04 INFO impl.BlockPoolSlice: Replica Cache file: /hadoop/dfs/data/current/BP-2094248745-172.18.0.2-1525881600893/current/replicas doesn't exist 
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Time to add replicas to map for block pool BP-2094248745-172.18.0.2-1525881600893 on volume /hadoop/dfs/data/current: 0ms
datanode_1  | 18/05/09 16:00:04 INFO impl.FsDatasetImpl: Total time to add all replicas to map: 2ms
datanode_1  | 18/05/09 16:00:04 INFO datanode.VolumeScanner: Now scanning bpid BP-2094248745-172.18.0.2-1525881600893 on volume /hadoop/dfs/data
datanode_1  | 18/05/09 16:00:04 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-c4c76f38-0eae-4ec2-b61a-f4836063d612): finished scanning block pool BP-2094248745-172.18.0.2-1525881600893
datanode_1  | 18/05/09 16:00:04 INFO datanode.DirectoryScanner: Periodic Directory Tree Verification scan starting at 5/9/18 4:14 PM with interval of 21600000ms
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: Block pool BP-2094248745-172.18.0.2-1525881600893 (Datanode Uuid f19545d6-b924-4985-87b2-93c71002a0ff) service to namenode/172.18.0.2:8020 beginning handshake with NN
datanode_1  | 18/05/09 16:00:04 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-c4c76f38-0eae-4ec2-b61a-f4836063d612): no suitable block pools found to scan.  Waiting 1814399970 ms.
namenode_1  | 18/05/09 16:00:04 INFO hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(172.18.0.3:50010, datanodeUuid=f19545d6-b924-4985-87b2-93c71002a0ff, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-86c2db2e-fc0e-49c3-9352-7ee4ce0ccb15;nsid=1379155504;c=1525881600893) storage f19545d6-b924-4985-87b2-93c71002a0ff
namenode_1  | 18/05/09 16:00:04 INFO net.NetworkTopology: Adding a new node: /default-rack/172.18.0.3:50010
namenode_1  | 18/05/09 16:00:04 INFO blockmanagement.BlockReportLeaseManager: Registered DN f19545d6-b924-4985-87b2-93c71002a0ff (172.18.0.3:50010).
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: Block pool Block pool BP-2094248745-172.18.0.2-1525881600893 (Datanode Uuid f19545d6-b924-4985-87b2-93c71002a0ff) service to namenode/172.18.0.2:8020 successfully registered with NN
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: For namenode namenode/172.18.0.2:8020 using BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
namenode_1  | 18/05/09 16:00:04 INFO blockmanagement.DatanodeDescriptor: Adding new storage ID DS-c4c76f38-0eae-4ec2-b61a-f4836063d612 for DN 172.18.0.3:50010
namenode_1  | 18/05/09 16:00:04 INFO BlockStateChange: BLOCK* processReport 0x82829e59da1b81c3: Processing first storage report for DS-c4c76f38-0eae-4ec2-b61a-f4836063d612 from datanode f19545d6-b924-4985-87b2-93c71002a0ff
namenode_1  | 18/05/09 16:00:04 INFO BlockStateChange: BLOCK* processReport 0x82829e59da1b81c3: from storage DS-c4c76f38-0eae-4ec2-b61a-f4836063d612 node DatanodeRegistration(172.18.0.3:50010, datanodeUuid=f19545d6-b924-4985-87b2-93c71002a0ff, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-86c2db2e-fc0e-49c3-9352-7ee4ce0ccb15;nsid=1379155504;c=1525881600893), blocks: 0, hasStaleStorage: false, processing time: 2 msecs, invalidatedBlocks: 0
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: Successfully sent block report 0x82829e59da1b81c3,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 5 msec to generate and 55 msecs for RPC and NN processing. Got back one command: FinalizeCommand/5.
datanode_1  | 18/05/09 16:00:04 INFO datanode.DataNode: Got finalize command for block pool BP-2094248745-172.18.0.2-1525881600893
test_1      | INFO	Instantiated configuration from '/app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg'.
test_1      | INFO	Instantiated <InsecureClient(url='http://namenode:50070')>.
test_1      | INFO	Uploading 'kinglear.txt' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | INFO	Listing '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Starting new HTTP connection (1): namenode
namenode_1  | May 09, 2018 4:00:48 PM com.sun.jersey.api.core.PackagesResourceConfig init
namenode_1  | INFO: Scanning for root resource and provider classes in the packages:
namenode_1  |   org.apache.hadoop.hdfs.server.namenode.web.resources
namenode_1  |   org.apache.hadoop.hdfs.web.resources
namenode_1  | May 09, 2018 4:00:48 PM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Root resource classes found:
namenode_1  |   class org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods
namenode_1  | May 09, 2018 4:00:48 PM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Provider classes found:
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.UserProvider
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.ExceptionHandler
namenode_1  | May 09, 2018 4:00:48 PM com.sun.jersey.server.impl.application.WebApplicationImpl _initiate
namenode_1  | INFO: Initiating Jersey application, version 'Jersey: 1.9 09/02/2011 11:17 AM'
namenode_1  | May 09, 2018 4:00:49 PM com.sun.jersey.spi.inject.Errors processErrorMessages
namenode_1  | WARNING: The following warnings have been detected with resource and/or provider classes:
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.deleteRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.DeleteOpParam,org.apache.hadoop.hdfs.web.resources.RecursiveParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.putRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PutOpParam,org.apache.hadoop.hdfs.web.resources.DestinationParam,org.apache.hadoop.hdfs.web.resources.OwnerParam,org.apache.hadoop.hdfs.web.resources.GroupParam,org.apache.hadoop.hdfs.web.resources.PermissionParam,org.apache.hadoop.hdfs.web.resources.OverwriteParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ReplicationParam,org.apache.hadoop.hdfs.web.resources.BlockSizeParam,org.apache.hadoop.hdfs.web.resources.ModificationTimeParam,org.apache.hadoop.hdfs.web.resources.AccessTimeParam,org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam,org.apache.hadoop.hdfs.web.resources.CreateParentParam,org.apache.hadoop.hdfs.web.resources.TokenArgumentParam,org.apache.hadoop.hdfs.web.resources.AclPermissionParam,org.apache.hadoop.hdfs.web.resources.XAttrNameParam,org.apache.hadoop.hdfs.web.resources.XAttrValueParam,org.apache.hadoop.hdfs.web.resources.XAttrSetFlagParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam,org.apache.hadoop.hdfs.web.resources.OldSnapshotNameParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.CreateFlagParam,org.apache.hadoop.hdfs.web.resources.StoragePolicyParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.postRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PostOpParam,org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.NewLengthParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.getRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.GetOpParam,org.apache.hadoop.hdfs.web.resources.OffsetParam,org.apache.hadoop.hdfs.web.resources.LengthParam,org.apache.hadoop.hdfs.web.resources.RenewerParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,java.util.List,org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.FsActionParam,org.apache.hadoop.hdfs.web.resources.TokenKindParam,org.apache.hadoop.hdfs.web.resources.TokenServiceParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
test_1      | DEBUG	http://namenode:50070 "GET /webhdfs/v1/?user.name=root&op=LISTSTATUS HTTP/1.1" 200 None
test_1      | DEBUG	Uploading 1 files using 1 thread(s).
test_1      | DEBUG	Uploading 'kinglear.txt' to '/kinglear.txt'.
test_1      | INFO	Writing to '/kinglear.txt'.
test_1      | DEBUG	Resolved path '/kinglear.txt' to '/kinglear.txt'.
test_1      | DEBUG	http://namenode:50070 "PUT /webhdfs/v1/kinglear.txt?user.name=root&overwrite=False&op=CREATE HTTP/1.1" 307 0
test_1      | DEBUG	Starting new HTTP connection (1): datanode
datanode_1  | 18/05/09 16:00:49 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/kinglear.txt?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
namenode_1  | 18/05/09 16:00:49 INFO hdfs.StateChange: BLOCK* allocate blk_1073741825_1001, replicas=172.18.0.3:50010 for /kinglear.txt
datanode_1  | 18/05/09 16:00:50 INFO datanode.DataNode: Receiving BP-2094248745-172.18.0.2-1525881600893:blk_1073741825_1001 src: /172.18.0.3:35822 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 16:00:50 INFO DataNode.clienttrace: src: /172.18.0.3:35822, dest: /172.18.0.3:50010, bytes: 157283, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_-2123249494_67, offset: 0, srvID: f19545d6-b924-4985-87b2-93c71002a0ff, blockid: BP-2094248745-172.18.0.2-1525881600893:blk_1073741825_1001, duration: 14839596
datanode_1  | 18/05/09 16:00:50 INFO datanode.DataNode: PacketResponder: BP-2094248745-172.18.0.2-1525881600893:blk_1073741825_1001, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 16:00:50 INFO namenode.FSNamesystem: BLOCK* blk_1073741825_1001 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /kinglear.txt
namenode_1  | 18/05/09 16:00:50 INFO namenode.EditLogFileOutputStream: Nothing to flush
namenode_1  | 18/05/09 16:00:50 INFO hdfs.StateChange: DIR* completeFile: /kinglear.txt is closed by DFSClient_NONMAPREDUCE_-2123249494_67
test_1      | DEBUG	Upload of 'kinglear.txt' to '/kinglear.txt' complete.
test_1      | INFO:root:Missing pipeline option (runner). Executing pipeline using the default runner: DirectRunner.
test_1      | INFO:root:==================== <function annotate_downstream_side_inputs at 0x7ff4acf225f0> ====================
test_1      | INFO:root:==================== <function lift_combiners at 0x7ff4acf22e60> ====================
test_1      | INFO:root:==================== <function expand_gbk at 0x7ff4acf22d70> ====================
test_1      | INFO:root:==================== <function sink_flattens at 0x7ff4acf22b90> ====================
test_1      | INFO:root:==================== <function greedily_fuse at 0x7ff4acf22b18> ====================
test_1      | INFO:root:==================== <function sort_stages at 0x7ff4acf228c0> ====================
test_1      | INFO:root:Running (((ref_AppliedPTransform_write/Write/WriteImpl/DoOnce/Read_16)+(ref_AppliedPTransform_write/Write/WriteImpl/InitializeWrite_17))+(ref_PCollection_PCollection_9/Write))+(ref_PCollection_PCollection_10/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out']>
test_1      | INFO:root:start <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7ff4aceeacd0>, start_position=None, stop_position=None)>
test_1      | INFO:root:finish <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7ff4aceeacd0>, start_position=None, stop_position=None), receivers=[ConsumerSet[write/Write/WriteImpl/DoOnce/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=2]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/InitializeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:Running (ref_AppliedPTransform_read/Read_3)+((ref_AppliedPTransform_split_4)+((ref_AppliedPTransform_pair_with_one_5)+(group/Write)))
test_1      | INFO:root:start <DataOutputOperation group/Write >
test_1      | INFO:root:start <DoOperation pair_with_one output_tags=['out']>
test_1      | INFO:root:start <DoOperation split output_tags=['out']>
test_1      | INFO:root:start <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7ff4acef98d0>, start_position=None, stop_position=None)>
datanode_1  | 18/05/09 16:00:51 INFO datanode.webhdfs: 172.18.0.4 GET /webhdfs/v1/kinglear.txt?op=OPEN&user.name=root&namenoderpcaddress=namenode:8020&length=157284&offset=0 200
test_1      | INFO:root:finish <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7ff4acef98d0>, start_position=None, stop_position=None), receivers=[ConsumerSet[read/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation split output_tags=['out'], receivers=[ConsumerSet[split.out0, coder=WindowedValueCoder[StrUtf8Coder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation pair_with_one output_tags=['out'], receivers=[ConsumerSet[pair_with_one.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], VarIntCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation group/Write >
test_1      | INFO:root:Running (((group/Read)+((ref_AppliedPTransform_count_10)+(ref_AppliedPTransform_format_11)))+(ref_AppliedPTransform_write/Write/WriteImpl/WriteBundles_18))+((ref_AppliedPTransform_write/Write/WriteImpl/Pair_19)+((ref_AppliedPTransform_write/Write/WriteImpl/WindowInto(WindowIntoFn)_20)+(write/Write/WriteImpl/GroupByKey/Write)))
test_1      | INFO:root:start <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Pair output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out']>
test_1      | INFO:root:start <DoOperation format output_tags=['out']>
test_1      | INFO:root:start <DoOperation count output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | WARNING:root:Mime types are not supported. Got non-default mime_type: text/plain
datanode_1  | 18/05/09 16:00:53 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/beam-temp-py-wordcount-integration-259cb0b253a211e8ae160242ac120004/a178ca29-f9fa-40b0-a8a1-0dde4cdb747e.py-wordcount-integration?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
test_1      | INFO:root:finish <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation count output_tags=['out'], receivers=[ConsumerSet[count.out0, coder=WindowedValueCoder[TupleCoder[StrUtf8Coder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation format output_tags=['out'], receivers=[ConsumerSet[format.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WriteBundles.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
namenode_1  | 18/05/09 16:00:53 INFO hdfs.StateChange: BLOCK* allocate blk_1073741826_1002, replicas=172.18.0.3:50010 for /beam-temp-py-wordcount-integration-259cb0b253a211e8ae160242ac120004/a178ca29-f9fa-40b0-a8a1-0dde4cdb747e.py-wordcount-integration
datanode_1  | 18/05/09 16:00:53 INFO datanode.DataNode: Receiving BP-2094248745-172.18.0.2-1525881600893:blk_1073741826_1002 src: /172.18.0.3:35840 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 16:00:53 INFO DataNode.clienttrace: src: /172.18.0.3:35840, dest: /172.18.0.3:50010, bytes: 48944, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_-587725473_69, offset: 0, srvID: f19545d6-b924-4985-87b2-93c71002a0ff, blockid: BP-2094248745-172.18.0.2-1525881600893:blk_1073741826_1002, duration: 3883636
datanode_1  | 18/05/09 16:00:53 INFO datanode.DataNode: PacketResponder: BP-2094248745-172.18.0.2-1525881600893:blk_1073741826_1002, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 16:00:53 INFO hdfs.StateChange: DIR* completeFile: /beam-temp-py-wordcount-integration-259cb0b253a211e8ae160242ac120004/a178ca29-f9fa-40b0-a8a1-0dde4cdb747e.py-wordcount-integration is closed by DFSClient_NONMAPREDUCE_-587725473_69
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Pair output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Pair.out0, coder=WindowedValueCoder[TupleCoder[FastPrimitivesCoder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WindowInto(WindowIntoFn).out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], LengthPrefixCoder[FastPrimitivesCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:Running (write/Write/WriteImpl/GroupByKey/Read)+((ref_AppliedPTransform_write/Write/WriteImpl/Extract_25)+(ref_PCollection_PCollection_17/Write))
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Extract output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Extract output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Extract.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:Running ((ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/PreFinalize_26))+(ref_PCollection_PCollection_18/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/PreFinalize.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:Running (ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/FinalizeWrite_27)
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:Starting finalize_write threads with num_shards: 1 (skipped: 0), batches: 1, num_threads: 1
test_1      | INFO:root:Renamed 1 shards in 0.14 seconds.
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/FinalizeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=0]]>
test_1      | INFO:root:number of empty lines: 1663
test_1      | INFO:root:average word length: 4
hdfs_it-jenkins-beam_postcommit_python_verify-4906_test_1 exited with code 0
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4906_datanode_1 ... 
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4906_namenode_1 ... 
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4906_datanode_1 ... done
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4906_namenode_1 ... done
Aborting on container exit...

real	1m17.007s
user	0m0.826s
sys	0m0.191s
:pythonPostCommit (Thread[Task worker for ':' Thread 5,5,main]) completed. Took 41 mins 45.021 secs.

BUILD SUCCESSFUL in 41m 50s
1 actionable task: 1 executed

Publishing build scan...
https://gradle.com/s/jmcubbnembmja

Build step 'Invoke Gradle script' changed build result to SUCCESS
[Set GitHub commit status (universal)] SUCCESS on repos [GHRepository@68de479c[description=Apache Beam,homepage=,name=beam,fork=false,size=58728,milestones={},language=Java,commits={},source=<null>,parent=<null>,responseHeaderFields={null=[HTTP/1.1 200 OK], Access-Control-Allow-Origin=[*], Access-Control-Expose-Headers=[ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval], Cache-Control=[private, max-age=60, s-maxage=60], Content-Encoding=[gzip], Content-Security-Policy=[default-src 'none'], Content-Type=[application/json; charset=utf-8], Date=[Wed, 09 May 2018 16:01:16 GMT], ETag=[W/"e8546411728fd07a5ada7baf7fc038ea"], Last-Modified=[Wed, 09 May 2018 15:39:49 GMT], OkHttp-Received-Millis=[1525881676877], OkHttp-Response-Source=[NETWORK 200], OkHttp-Selected-Protocol=[http/1.1], OkHttp-Sent-Millis=[1525881676724], Referrer-Policy=[origin-when-cross-origin, strict-origin-when-cross-origin], Server=[GitHub.com], Status=[200 OK], Strict-Transport-Security=[max-age=31536000; includeSubdomains; preload], Transfer-Encoding=[chunked], Vary=[Accept, Authorization, Cookie, X-GitHub-OTP], X-Accepted-OAuth-Scopes=[repo], X-Content-Type-Options=[nosniff], X-Frame-Options=[deny], X-GitHub-Media-Type=[github.v3; format=json], X-GitHub-Request-Id=[CA5A:4F42:152F675:2B195F8:5AF31B22], X-OAuth-Scopes=[admin:repo_hook, repo, repo:status], X-RateLimit-Limit=[5000], X-RateLimit-Remaining=[4973], X-RateLimit-Reset=[1525882523], X-Runtime-rack=[0.071247], X-XSS-Protection=[1; mode=block]},url=https://api.github.com/repos/apache/beam,id=50904245]] (sha:60f90c8) with context:beam_PostCommit_Python_Verify
Setting commit status on GitHub for https://github.com/apache/beam/commit/60f90c8dcb229c35a82c7be15e64a89678bae058
ERROR: Build step failed with exception
java.io.FileNotFoundException: https://api.github.com/repos/apache/beam/statuses/60f90c8dcb229c35a82c7be15e64a89678bae058
	at com.squareup.okhttp.internal.huc.HttpURLConnectionImpl.getInputStream(HttpURLConnectionImpl.java:243)
	at com.squareup.okhttp.internal.huc.DelegatingHttpsURLConnection.getInputStream(DelegatingHttpsURLConnection.java:210)
	at com.squareup.okhttp.internal.huc.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:25)
	at org.kohsuke.github.Requester.parse(Requester.java:612)
	at org.kohsuke.github.Requester.parse(Requester.java:594)
	at org.kohsuke.github.Requester._to(Requester.java:272)
Caused: org.kohsuke.github.GHFileNotFoundException: {"message":"Not Found","documentation_url":"https://developer.github.com/v3/repos/statuses/#create-a-status"}
	at org.kohsuke.github.Requester.handleApiError(Requester.java:686)
	at org.kohsuke.github.Requester._to(Requester.java:293)
	at org.kohsuke.github.Requester.to(Requester.java:234)
	at org.kohsuke.github.GHRepository.createCommitStatus(GHRepository.java:1075)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:160)
Caused: org.jenkinsci.plugins.github.common.CombineErrorHandler$ErrorHandlingException
	at org.jenkinsci.plugins.github.common.CombineErrorHandler.handle(CombineErrorHandler.java:74)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:164)
	at com.cloudbees.jenkins.GitHubCommitNotifier.perform(GitHubCommitNotifier.java:151)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1749)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Build step 'Set build status on GitHub commit [deprecated]' marked build as failure


Build failed in Jenkins: beam_PostCommit_Python_Verify #4905

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/4905/display/redirect>

------------------------------------------
[...truncated 1.27 MB...]
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension     = 30000
namenode_1  | 18/05/09 09:39:56 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.window.num.buckets = 10
namenode_1  | 18/05/09 09:39:56 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.num.users = 10
namenode_1  | 18/05/09 09:39:56 INFO metrics.TopMetrics: NNTop conf: dfs.namenode.top.windows.minutes = 1,5,25
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
namenode_1  | 18/05/09 09:39:56 INFO util.GSet: Computing capacity for map NameNodeRetryCache
namenode_1  | 18/05/09 09:39:56 INFO util.GSet: VM type       = 64-bit
namenode_1  | 18/05/09 09:39:56 INFO util.GSet: 0.029999999329447746% max memory 958.5 MB = 294.5 KB
namenode_1  | 18/05/09 09:39:56 INFO util.GSet: capacity      = 2^15 = 32768 entries
namenode_1  | 18/05/09 09:39:56 INFO common.Storage: Lock on /hadoop/dfs/name/in_use.lock acquired by nodename 143@namenode
namenode_1  | 18/05/09 09:39:56 INFO namenode.FileJournalManager: Recovering unfinalized segments in /hadoop/dfs/name/current
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSImage: No edit log streams selected.
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSImage: Planning to load image: FSImageFile(file=/hadoop/dfs/name/current/fsimage_0000000000000000000, cpktTxId=0000000000000000000)
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSImageFormatPBINode: Loading 1 INodes.
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSImageFormatProtobuf: Loaded FSImage in 0 seconds.
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSImage: Loaded image for txid 0 from /hadoop/dfs/name/current/fsimage_0000000000000000000
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSNamesystem: Need to save fs image? false (staleImage=false, haEnabled=false, isRollingUpgrade=false)
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSEditLog: Starting log segment at 1
namenode_1  | 18/05/09 09:39:56 INFO namenode.NameCache: initialized with 0 entries 0 lookups
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSNamesystem: Finished loading FSImage in 323 msecs
datanode_1  | 18/05/09 09:39:56 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 0 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
namenode_1  | 18/05/09 09:39:56 INFO namenode.NameNode: RPC server is binding to 0.0.0.0:8020
namenode_1  | 18/05/09 09:39:56 INFO ipc.CallQueueManager: Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 1000 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
namenode_1  | 18/05/09 09:39:56 INFO ipc.Server: Starting Socket Reader #1 for port 8020
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSNamesystem: Registered FSNamesystemState MBean
namenode_1  | 18/05/09 09:39:56 INFO namenode.LeaseManager: Number of blocks under construction: 0
namenode_1  | 18/05/09 09:39:56 INFO blockmanagement.BlockManager: initializing replication queues
namenode_1  | 18/05/09 09:39:56 INFO hdfs.StateChange: STATE* Leaving safe mode after 0 secs
namenode_1  | 18/05/09 09:39:56 INFO hdfs.StateChange: STATE* Network topology has 0 racks and 0 datanodes
namenode_1  | 18/05/09 09:39:56 INFO hdfs.StateChange: STATE* UnderReplicatedBlocks has 0 blocks
namenode_1  | 18/05/09 09:39:56 INFO blockmanagement.BlockManager: Total number of blocks            = 0
namenode_1  | 18/05/09 09:39:56 INFO blockmanagement.BlockManager: Number of invalid blocks          = 0
namenode_1  | 18/05/09 09:39:56 INFO blockmanagement.BlockManager: Number of under-replicated blocks = 0
namenode_1  | 18/05/09 09:39:56 INFO blockmanagement.BlockManager: Number of  over-replicated blocks = 0
namenode_1  | 18/05/09 09:39:56 INFO blockmanagement.BlockManager: Number of blocks being written    = 0
namenode_1  | 18/05/09 09:39:56 INFO hdfs.StateChange: STATE* Replication Queue initialization scan for invalid, over- and under-replicated blocks completed in 15 msec
namenode_1  | 18/05/09 09:39:56 INFO ipc.Server: IPC Server Responder: starting
namenode_1  | 18/05/09 09:39:56 INFO ipc.Server: IPC Server listener on 8020: starting
namenode_1  | 18/05/09 09:39:56 INFO namenode.NameNode: NameNode RPC up at: namenode/172.18.0.2:8020
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSNamesystem: Starting services required for active state
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSDirectory: Initializing quota with 4 thread(s)
namenode_1  | 18/05/09 09:39:56 INFO namenode.FSDirectory: Quota initialization completed in 5 milliseconds
namenode_1  | name space=1
namenode_1  | storage space=0
namenode_1  | storage types=RAM_DISK=0, SSD=0, DISK=0, ARCHIVE=0
namenode_1  | 18/05/09 09:39:56 INFO blockmanagement.CacheReplicationMonitor: Starting CacheReplicationMonitor with interval 30000 milliseconds
datanode_1  | 18/05/09 09:39:57 INFO ipc.Client: Retrying connect to server: namenode/172.18.0.2:8020. Already tried 1 time(s); retry policy is RetryUpToMaximumCountWithFixedSleep(maxRetries=10, sleepTime=1000 MILLISECONDS)
datanode_1  | 18/05/09 09:39:57 INFO datanode.DataNode: Acknowledging ACTIVE Namenode during handshake: Block pool <registering> (Datanode Uuid unassigned) service to namenode/172.18.0.2:8020
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Using 1 threads to upgrade data directories (dfs.datanode.parallel.volumes.load.threads.num=1, dataDirs=1)
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Lock on /hadoop/dfs/data/in_use.lock acquired by nodename 83@datanode
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Storage directory /hadoop/dfs/data is not formatted for namespace 479329256. Formatting...
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Generated new storageID DS-5012487d-027d-46f4-9e5d-129564b1e091 for directory /hadoop/dfs/data
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Analyzing storage directories for bpid BP-1669702170-172.18.0.2-1525858794146
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Locking is disabled for /hadoop/dfs/data/current/BP-1669702170-172.18.0.2-1525858794146
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Block pool storage directory /hadoop/dfs/data/current/BP-1669702170-172.18.0.2-1525858794146 is not formatted for BP-1669702170-172.18.0.2-1525858794146. Formatting ...
datanode_1  | 18/05/09 09:39:57 INFO common.Storage: Formatting block pool BP-1669702170-172.18.0.2-1525858794146 directory /hadoop/dfs/data/current/BP-1669702170-172.18.0.2-1525858794146/current
datanode_1  | 18/05/09 09:39:57 INFO datanode.DataNode: Setting up storage: nsid=479329256;bpid=BP-1669702170-172.18.0.2-1525858794146;lv=-57;nsInfo=lv=-63;cid=CID-0cf8237d-9d2f-4b5b-8924-8daf140047a2;nsid=479329256;c=1525858794146;bpid=BP-1669702170-172.18.0.2-1525858794146;dnuuid=null
datanode_1  | 18/05/09 09:39:57 INFO datanode.DataNode: Generated and persisted new Datanode UUID c9f97050-5748-440e-84f6-940a6f631a69
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Added new volume: DS-5012487d-027d-46f4-9e5d-129564b1e091
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Added volume - /hadoop/dfs/data/current, StorageType: DISK
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Registered FSDatasetState MBean
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Volume reference is released.
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Adding block pool BP-1669702170-172.18.0.2-1525858794146
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Scanning block pool BP-1669702170-172.18.0.2-1525858794146 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Time taken to scan block pool BP-1669702170-172.18.0.2-1525858794146 on /hadoop/dfs/data/current: 15ms
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Total time to scan all replicas for block pool BP-1669702170-172.18.0.2-1525858794146: 17ms
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Adding replicas to map for block pool BP-1669702170-172.18.0.2-1525858794146 on volume /hadoop/dfs/data/current...
datanode_1  | 18/05/09 09:39:57 INFO impl.BlockPoolSlice: Replica Cache file: /hadoop/dfs/data/current/BP-1669702170-172.18.0.2-1525858794146/current/replicas doesn't exist 
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Time to add replicas to map for block pool BP-1669702170-172.18.0.2-1525858794146 on volume /hadoop/dfs/data/current: 0ms
datanode_1  | 18/05/09 09:39:57 INFO impl.FsDatasetImpl: Total time to add all replicas to map: 1ms
datanode_1  | 18/05/09 09:39:57 INFO datanode.VolumeScanner: Now scanning bpid BP-1669702170-172.18.0.2-1525858794146 on volume /hadoop/dfs/data
datanode_1  | 18/05/09 09:39:57 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-5012487d-027d-46f4-9e5d-129564b1e091): finished scanning block pool BP-1669702170-172.18.0.2-1525858794146
datanode_1  | 18/05/09 09:39:57 INFO datanode.DirectoryScanner: Periodic Directory Tree Verification scan starting at 5/9/18 2:53 PM with interval of 21600000ms
datanode_1  | 18/05/09 09:39:57 INFO datanode.DataNode: Block pool BP-1669702170-172.18.0.2-1525858794146 (Datanode Uuid c9f97050-5748-440e-84f6-940a6f631a69) service to namenode/172.18.0.2:8020 beginning handshake with NN
datanode_1  | 18/05/09 09:39:57 INFO datanode.VolumeScanner: VolumeScanner(/hadoop/dfs/data, DS-5012487d-027d-46f4-9e5d-129564b1e091): no suitable block pools found to scan.  Waiting 1814399969 ms.
namenode_1  | 18/05/09 09:39:57 INFO hdfs.StateChange: BLOCK* registerDatanode: from DatanodeRegistration(172.18.0.3:50010, datanodeUuid=c9f97050-5748-440e-84f6-940a6f631a69, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-0cf8237d-9d2f-4b5b-8924-8daf140047a2;nsid=479329256;c=1525858794146) storage c9f97050-5748-440e-84f6-940a6f631a69
namenode_1  | 18/05/09 09:39:57 INFO net.NetworkTopology: Adding a new node: /default-rack/172.18.0.3:50010
namenode_1  | 18/05/09 09:39:57 INFO blockmanagement.BlockReportLeaseManager: Registered DN c9f97050-5748-440e-84f6-940a6f631a69 (172.18.0.3:50010).
datanode_1  | 18/05/09 09:39:57 INFO datanode.DataNode: Block pool BP-1669702170-172.18.0.2-1525858794146 (Datanode Uuid c9f97050-5748-440e-84f6-940a6f631a69) service to namenode/172.18.0.2:8020 successfully registered with NN
datanode_1  | 18/05/09 09:39:57 INFO datanode.DataNode: For namenode namenode/172.18.0.2:8020 using BLOCKREPORT_INTERVAL of 21600000msec CACHEREPORT_INTERVAL of 10000msec Initial delay: 0msec; heartBeatInterval=3000
namenode_1  | 18/05/09 09:39:58 INFO blockmanagement.DatanodeDescriptor: Adding new storage ID DS-5012487d-027d-46f4-9e5d-129564b1e091 for DN 172.18.0.3:50010
namenode_1  | 18/05/09 09:39:58 INFO BlockStateChange: BLOCK* processReport 0xcba83fc2b0be74b3: Processing first storage report for DS-5012487d-027d-46f4-9e5d-129564b1e091 from datanode c9f97050-5748-440e-84f6-940a6f631a69
namenode_1  | 18/05/09 09:39:58 INFO BlockStateChange: BLOCK* processReport 0xcba83fc2b0be74b3: from storage DS-5012487d-027d-46f4-9e5d-129564b1e091 node DatanodeRegistration(172.18.0.3:50010, datanodeUuid=c9f97050-5748-440e-84f6-940a6f631a69, infoPort=50075, infoSecurePort=0, ipcPort=50020, storageInfo=lv=-57;cid=CID-0cf8237d-9d2f-4b5b-8924-8daf140047a2;nsid=479329256;c=1525858794146), blocks: 0, hasStaleStorage: false, processing time: 2 msecs, invalidatedBlocks: 0
datanode_1  | 18/05/09 09:39:58 INFO datanode.DataNode: Successfully sent block report 0xcba83fc2b0be74b3,  containing 1 storage report(s), of which we sent 1. The reports had 0 total blocks and used 1 RPC(s). This took 4 msec to generate and 51 msecs for RPC and NN processing. Got back one command: FinalizeCommand/5.
datanode_1  | 18/05/09 09:39:58 INFO datanode.DataNode: Got finalize command for block pool BP-1669702170-172.18.0.2-1525858794146
test_1      | INFO	Instantiated configuration from '/app/sdks/python/apache_beam/io/hdfs_integration_test/hdfscli.cfg'.
test_1      | INFO	Instantiated <InsecureClient(url='http://namenode:50070')>.
test_1      | INFO	Uploading 'kinglear.txt' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | INFO	Listing '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Resolved path '/' to '/'.
test_1      | DEBUG	Starting new HTTP connection (1): namenode
namenode_1  | May 09, 2018 9:40:41 AM com.sun.jersey.api.core.PackagesResourceConfig init
namenode_1  | INFO: Scanning for root resource and provider classes in the packages:
namenode_1  |   org.apache.hadoop.hdfs.server.namenode.web.resources
namenode_1  |   org.apache.hadoop.hdfs.web.resources
namenode_1  | May 09, 2018 9:40:42 AM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Root resource classes found:
namenode_1  |   class org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods
namenode_1  | May 09, 2018 9:40:42 AM com.sun.jersey.api.core.ScanningResourceConfig logClasses
namenode_1  | INFO: Provider classes found:
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.UserProvider
namenode_1  |   class org.apache.hadoop.hdfs.web.resources.ExceptionHandler
namenode_1  | May 09, 2018 9:40:42 AM com.sun.jersey.server.impl.application.WebApplicationImpl _initiate
namenode_1  | INFO: Initiating Jersey application, version 'Jersey: 1.9 09/02/2011 11:17 AM'
namenode_1  | May 09, 2018 9:40:42 AM com.sun.jersey.spi.inject.Errors processErrorMessages
namenode_1  | WARNING: The following warnings have been detected with resource and/or provider classes:
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.putRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PutOpParam,org.apache.hadoop.hdfs.web.resources.DestinationParam,org.apache.hadoop.hdfs.web.resources.OwnerParam,org.apache.hadoop.hdfs.web.resources.GroupParam,org.apache.hadoop.hdfs.web.resources.PermissionParam,org.apache.hadoop.hdfs.web.resources.OverwriteParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ReplicationParam,org.apache.hadoop.hdfs.web.resources.BlockSizeParam,org.apache.hadoop.hdfs.web.resources.ModificationTimeParam,org.apache.hadoop.hdfs.web.resources.AccessTimeParam,org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam,org.apache.hadoop.hdfs.web.resources.CreateParentParam,org.apache.hadoop.hdfs.web.resources.TokenArgumentParam,org.apache.hadoop.hdfs.web.resources.AclPermissionParam,org.apache.hadoop.hdfs.web.resources.XAttrNameParam,org.apache.hadoop.hdfs.web.resources.XAttrValueParam,org.apache.hadoop.hdfs.web.resources.XAttrSetFlagParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam,org.apache.hadoop.hdfs.web.resources.OldSnapshotNameParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.CreateFlagParam,org.apache.hadoop.hdfs.web.resources.StoragePolicyParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.deleteRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.DeleteOpParam,org.apache.hadoop.hdfs.web.resources.RecursiveParam,org.apache.hadoop.hdfs.web.resources.SnapshotNameParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.postRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.PostOpParam,org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.NewLengthParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
namenode_1  |   WARNING: A sub-resource method, public javax.ws.rs.core.Response org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods.getRoot(org.apache.hadoop.security.UserGroupInformation,org.apache.hadoop.hdfs.web.resources.DelegationParam,org.apache.hadoop.hdfs.web.resources.UserParam,org.apache.hadoop.hdfs.web.resources.DoAsParam,org.apache.hadoop.hdfs.web.resources.GetOpParam,org.apache.hadoop.hdfs.web.resources.OffsetParam,org.apache.hadoop.hdfs.web.resources.LengthParam,org.apache.hadoop.hdfs.web.resources.RenewerParam,org.apache.hadoop.hdfs.web.resources.BufferSizeParam,java.util.List,org.apache.hadoop.hdfs.web.resources.XAttrEncodingParam,org.apache.hadoop.hdfs.web.resources.ExcludeDatanodesParam,org.apache.hadoop.hdfs.web.resources.FsActionParam,org.apache.hadoop.hdfs.web.resources.TokenKindParam,org.apache.hadoop.hdfs.web.resources.TokenServiceParam) throws java.io.IOException,java.lang.InterruptedException, with URI template, "/", is treated as a resource method
test_1      | DEBUG	http://namenode:50070 "GET /webhdfs/v1/?user.name=root&op=LISTSTATUS HTTP/1.1" 200 None
test_1      | DEBUG	Uploading 1 files using 1 thread(s).
test_1      | DEBUG	Uploading 'kinglear.txt' to '/kinglear.txt'.
test_1      | INFO	Writing to '/kinglear.txt'.
test_1      | DEBUG	Resolved path '/kinglear.txt' to '/kinglear.txt'.
test_1      | DEBUG	http://namenode:50070 "PUT /webhdfs/v1/kinglear.txt?user.name=root&overwrite=False&op=CREATE HTTP/1.1" 307 0
test_1      | DEBUG	Starting new HTTP connection (1): datanode
datanode_1  | 18/05/09 09:40:43 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/kinglear.txt?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
namenode_1  | 18/05/09 09:40:43 INFO hdfs.StateChange: BLOCK* allocate blk_1073741825_1001, replicas=172.18.0.3:50010 for /kinglear.txt
datanode_1  | 18/05/09 09:40:43 INFO datanode.DataNode: Receiving BP-1669702170-172.18.0.2-1525858794146:blk_1073741825_1001 src: /172.18.0.3:46504 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 09:40:43 INFO DataNode.clienttrace: src: /172.18.0.3:46504, dest: /172.18.0.3:50010, bytes: 157283, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_1370713313_67, offset: 0, srvID: c9f97050-5748-440e-84f6-940a6f631a69, blockid: BP-1669702170-172.18.0.2-1525858794146:blk_1073741825_1001, duration: 12521850
datanode_1  | 18/05/09 09:40:43 INFO datanode.DataNode: PacketResponder: BP-1669702170-172.18.0.2-1525858794146:blk_1073741825_1001, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 09:40:43 INFO namenode.FSNamesystem: BLOCK* blk_1073741825_1001 is COMMITTED but not COMPLETE(numNodes= 0 <  minimum = 1) in file /kinglear.txt
namenode_1  | 18/05/09 09:40:43 INFO namenode.EditLogFileOutputStream: Nothing to flush
namenode_1  | 18/05/09 09:40:43 INFO hdfs.StateChange: DIR* completeFile: /kinglear.txt is closed by DFSClient_NONMAPREDUCE_1370713313_67
test_1      | DEBUG	Upload of 'kinglear.txt' to '/kinglear.txt' complete.
test_1      | INFO:root:Missing pipeline option (runner). Executing pipeline using the default runner: DirectRunner.
test_1      | INFO:root:==================== <function annotate_downstream_side_inputs at 0x7f15ae9c08c0> ====================
test_1      | INFO:root:==================== <function lift_combiners at 0x7f15ae9c0e60> ====================
test_1      | INFO:root:==================== <function expand_gbk at 0x7f15ae9c0500> ====================
test_1      | INFO:root:==================== <function sink_flattens at 0x7f15ae9c0c80> ====================
test_1      | INFO:root:==================== <function greedily_fuse at 0x7f15ae9c0aa0> ====================
test_1      | INFO:root:==================== <function sort_stages at 0x7f15ae9c0758> ====================
test_1      | INFO:root:Running (((ref_AppliedPTransform_write/Write/WriteImpl/DoOnce/Read_16)+(ref_AppliedPTransform_write/Write/WriteImpl/InitializeWrite_17))+(ref_PCollection_PCollection_9/Write))+(ref_PCollection_PCollection_10/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out']>
test_1      | INFO:root:start <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7f15ae987cd0>, start_position=None, stop_position=None)>
test_1      | INFO:root:finish <ReadOperation write/Write/WriteImpl/DoOnce/Read source=SourceBundle(weight=1.0, source=<apache_beam.transforms.core._CreateSource object at 0x7f15ae987cd0>, start_position=None, stop_position=None), receivers=[ConsumerSet[write/Write/WriteImpl/DoOnce/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=2]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/InitializeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/InitializeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_10/Write >
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_9/Write >
test_1      | INFO:root:Running (ref_AppliedPTransform_read/Read_3)+((ref_AppliedPTransform_split_4)+((ref_AppliedPTransform_pair_with_one_5)+(group/Write)))
test_1      | INFO:root:start <DataOutputOperation group/Write >
test_1      | INFO:root:start <DoOperation pair_with_one output_tags=['out']>
test_1      | INFO:root:start <DoOperation split output_tags=['out']>
test_1      | INFO:root:start <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7f15ae9988d0>, start_position=None, stop_position=None)>
datanode_1  | 18/05/09 09:40:45 INFO datanode.webhdfs: 172.18.0.4 GET /webhdfs/v1/kinglear.txt?op=OPEN&user.name=root&namenoderpcaddress=namenode:8020&length=157284&offset=0 200
test_1      | INFO:root:finish <ReadOperation read/Read source=SourceBundle(weight=1.0, source=<apache_beam.io.textio._TextSource object at 0x7f15ae9988d0>, start_position=None, stop_position=None), receivers=[ConsumerSet[read/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation split output_tags=['out'], receivers=[ConsumerSet[split.out0, coder=WindowedValueCoder[StrUtf8Coder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation pair_with_one output_tags=['out'], receivers=[ConsumerSet[pair_with_one.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], VarIntCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation group/Write >
test_1      | INFO:root:Running (((group/Read)+((ref_AppliedPTransform_count_10)+(ref_AppliedPTransform_format_11)))+(ref_AppliedPTransform_write/Write/WriteImpl/WriteBundles_18))+((ref_AppliedPTransform_write/Write/WriteImpl/Pair_19)+((ref_AppliedPTransform_write/Write/WriteImpl/WindowInto(WindowIntoFn)_20)+(write/Write/WriteImpl/GroupByKey/Write)))
test_1      | INFO:root:start <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Pair output_tags=['out']>
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out']>
test_1      | INFO:root:start <DoOperation format output_tags=['out']>
test_1      | INFO:root:start <DoOperation count output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | WARNING:root:Mime types are not supported. Got non-default mime_type: text/plain
datanode_1  | 18/05/09 09:40:46 INFO datanode.webhdfs: 172.18.0.4 PUT /webhdfs/v1/beam-temp-py-wordcount-integration-0bc51330536d11e8907f0242ac120004/253adb68-e8d3-4028-bbed-bdfe82739b26.py-wordcount-integration?op=CREATE&user.name=root&namenoderpcaddress=namenode:8020&createflag=&createparent=true&overwrite=false&user.name=root 201
test_1      | INFO:root:finish <DataInputOperation group/Read receivers=[ConsumerSet[group/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[StrUtf8Coder], IterableCoder[VarIntCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation count output_tags=['out'], receivers=[ConsumerSet[count.out0, coder=WindowedValueCoder[TupleCoder[StrUtf8Coder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation format output_tags=['out'], receivers=[ConsumerSet[format.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WriteBundles output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WriteBundles.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
namenode_1  | 18/05/09 09:40:46 INFO hdfs.StateChange: BLOCK* allocate blk_1073741826_1002, replicas=172.18.0.3:50010 for /beam-temp-py-wordcount-integration-0bc51330536d11e8907f0242ac120004/253adb68-e8d3-4028-bbed-bdfe82739b26.py-wordcount-integration
datanode_1  | 18/05/09 09:40:46 INFO datanode.DataNode: Receiving BP-1669702170-172.18.0.2-1525858794146:blk_1073741826_1002 src: /172.18.0.3:46526 dest: /172.18.0.3:50010
datanode_1  | 18/05/09 09:40:46 INFO DataNode.clienttrace: src: /172.18.0.3:46526, dest: /172.18.0.3:50010, bytes: 48944, op: HDFS_WRITE, cliID: DFSClient_NONMAPREDUCE_684707775_69, offset: 0, srvID: c9f97050-5748-440e-84f6-940a6f631a69, blockid: BP-1669702170-172.18.0.2-1525858794146:blk_1073741826_1002, duration: 3962781
datanode_1  | 18/05/09 09:40:46 INFO datanode.DataNode: PacketResponder: BP-1669702170-172.18.0.2-1525858794146:blk_1073741826_1002, type=LAST_IN_PIPELINE terminating
namenode_1  | 18/05/09 09:40:46 INFO hdfs.StateChange: DIR* completeFile: /beam-temp-py-wordcount-integration-0bc51330536d11e8907f0242ac120004/253adb68-e8d3-4028-bbed-bdfe82739b26.py-wordcount-integration is closed by DFSClient_NONMAPREDUCE_684707775_69
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Pair output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Pair.out0, coder=WindowedValueCoder[TupleCoder[FastPrimitivesCoder, FastPrimitivesCoder]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/WindowInto(WindowIntoFn) output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/WindowInto(WindowIntoFn).out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], LengthPrefixCoder[FastPrimitivesCoder]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation write/Write/WriteImpl/GroupByKey/Write >
test_1      | INFO:root:Running (write/Write/WriteImpl/GroupByKey/Read)+((ref_AppliedPTransform_write/Write/WriteImpl/Extract_25)+(ref_PCollection_PCollection_17/Write))
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/Extract output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation write/Write/WriteImpl/GroupByKey/Read receivers=[ConsumerSet[write/Write/WriteImpl/GroupByKey/Read.out0, coder=WindowedValueCoder[TupleCoder[LengthPrefixCoder[FastPrimitivesCoder], IterableCoder[LengthPrefixCoder[FastPrimitivesCoder]]]], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/Extract output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/Extract.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_17/Write >
test_1      | INFO:root:Running ((ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/PreFinalize_26))+(ref_PCollection_PCollection_18/Write)
test_1      | INFO:root:start <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/PreFinalize output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/PreFinalize.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DataOutputOperation ref_PCollection_PCollection_18/Write >
test_1      | INFO:root:Running (ref_PCollection_PCollection_9/Read)+(ref_AppliedPTransform_write/Write/WriteImpl/FinalizeWrite_27)
test_1      | INFO:root:start <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out']>
test_1      | INFO:root:start <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:Starting finalize_write threads with num_shards: 1 (skipped: 0), batches: 1, num_threads: 1
test_1      | INFO:root:Renamed 1 shards in 0.14 seconds.
test_1      | INFO:root:finish <DataInputOperation ref_PCollection_PCollection_9/Read receivers=[ConsumerSet[ref_PCollection_PCollection_9/Read.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=1]]>
test_1      | INFO:root:finish <DoOperation write/Write/WriteImpl/FinalizeWrite output_tags=['out'], receivers=[ConsumerSet[write/Write/WriteImpl/FinalizeWrite.out0, coder=WindowedValueCoder[FastPrimitivesCoder], len(consumers)=0]]>
test_1      | INFO:root:number of empty lines: 1663
test_1      | INFO:root:average word length: 4
hdfs_it-jenkins-beam_postcommit_python_verify-4905_test_1 exited with code 0
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4905_datanode_1 ... 
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4905_namenode_1 ... 

Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4905_datanode_1 ... done
Stopping hdfs_it-jenkins-beam_postcommit_python_verify-4905_namenode_1 ... done
Aborting on container exit...

real	1m16.878s
user	0m0.858s
sys	0m0.168s
:pythonPostCommit (Thread[Task worker for ':' Thread 11,5,main]) completed. Took 40 mins 46.902 secs.

BUILD SUCCESSFUL in 40m 54s
1 actionable task: 1 executed

Publishing build scan...
https://gradle.com/s/splqu4pka3hcm

Build step 'Invoke Gradle script' changed build result to SUCCESS
[Set GitHub commit status (universal)] SUCCESS on repos [GHRepository@750cfe1e[description=Apache Beam,homepage=,name=beam,fork=false,size=58754,milestones={},language=Java,commits={},source=<null>,parent=<null>,responseHeaderFields={null=[HTTP/1.1 200 OK], Access-Control-Allow-Origin=[*], Access-Control-Expose-Headers=[ETag, Link, Retry-After, X-GitHub-OTP, X-RateLimit-Limit, X-RateLimit-Remaining, X-RateLimit-Reset, X-OAuth-Scopes, X-Accepted-OAuth-Scopes, X-Poll-Interval], Cache-Control=[private, max-age=60, s-maxage=60], Content-Encoding=[gzip], Content-Security-Policy=[default-src 'none'], Content-Type=[application/json; charset=utf-8], Date=[Wed, 09 May 2018 09:41:10 GMT], ETag=[W/"edc538b569b8706bee144344d3b1a471"], Last-Modified=[Wed, 09 May 2018 06:33:16 GMT], OkHttp-Received-Millis=[1525858870402], OkHttp-Response-Source=[NETWORK 200], OkHttp-Selected-Protocol=[http/1.1], OkHttp-Sent-Millis=[1525858870103], Referrer-Policy=[origin-when-cross-origin, strict-origin-when-cross-origin], Server=[GitHub.com], Status=[200 OK], Strict-Transport-Security=[max-age=31536000; includeSubdomains; preload], Transfer-Encoding=[chunked], Vary=[Accept, Authorization, Cookie, X-GitHub-OTP], X-Accepted-OAuth-Scopes=[repo], X-Content-Type-Options=[nosniff], X-Frame-Options=[deny], X-GitHub-Media-Type=[github.v3; format=json], X-GitHub-Request-Id=[904E:4F3F:73BE04:1120E01:5AF2C236], X-OAuth-Scopes=[admin:repo_hook, repo, repo:status], X-RateLimit-Limit=[5000], X-RateLimit-Remaining=[4987], X-RateLimit-Reset=[1525860010], X-Runtime-rack=[0.070231], X-XSS-Protection=[1; mode=block]},url=https://api.github.com/repos/apache/beam,id=50904245]] (sha:60f90c8) with context:beam_PostCommit_Python_Verify
Setting commit status on GitHub for https://github.com/apache/beam/commit/60f90c8dcb229c35a82c7be15e64a89678bae058
ERROR: Build step failed with exception
java.io.FileNotFoundException: https://api.github.com/repos/apache/beam/statuses/60f90c8dcb229c35a82c7be15e64a89678bae058
	at com.squareup.okhttp.internal.huc.HttpURLConnectionImpl.getInputStream(HttpURLConnectionImpl.java:243)
	at com.squareup.okhttp.internal.huc.DelegatingHttpsURLConnection.getInputStream(DelegatingHttpsURLConnection.java:210)
	at com.squareup.okhttp.internal.huc.HttpsURLConnectionImpl.getInputStream(HttpsURLConnectionImpl.java:25)
	at org.kohsuke.github.Requester.parse(Requester.java:612)
	at org.kohsuke.github.Requester.parse(Requester.java:594)
	at org.kohsuke.github.Requester._to(Requester.java:272)
Caused: org.kohsuke.github.GHFileNotFoundException: {"message":"Not Found","documentation_url":"https://developer.github.com/v3/repos/statuses/#create-a-status"}
	at org.kohsuke.github.Requester.handleApiError(Requester.java:686)
	at org.kohsuke.github.Requester._to(Requester.java:293)
	at org.kohsuke.github.Requester.to(Requester.java:234)
	at org.kohsuke.github.GHRepository.createCommitStatus(GHRepository.java:1075)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:160)
Caused: org.jenkinsci.plugins.github.common.CombineErrorHandler$ErrorHandlingException
	at org.jenkinsci.plugins.github.common.CombineErrorHandler.handle(CombineErrorHandler.java:74)
	at org.jenkinsci.plugins.github.status.GitHubCommitStatusSetter.perform(GitHubCommitStatusSetter.java:164)
	at com.cloudbees.jenkins.GitHubCommitNotifier.perform(GitHubCommitNotifier.java:151)
	at hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
	at hudson.model.Build$BuildExecution.post2(Build.java:186)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
	at hudson.model.Run.execute(Run.java:1749)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:97)
	at hudson.model.Executor.run(Executor.java:429)
Build step 'Set build status on GitHub commit [deprecated]' marked build as failure
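For reference, the failure above comes from GHRepository.createCommitStatus receiving a 404 from the GitHub commit-statuses API; GitHub typically answers 404 rather than 403 when the credentials in use cannot see the repository or commit (posting a status needs the repo:status scope and push access). Below is a minimal, illustrative sketch of that call chain using the org.kohsuke.github client named in the stack trace; the token lookup, target URL, and description are assumptions for illustration, not values taken from this job's configuration.

// Illustrative sketch only: roughly the API call the Jenkins commit-status
// step performs, per the stack trace above. Token, target URL, and
// description are placeholders, not this job's real configuration.
import org.kohsuke.github.GHCommitState;
import org.kohsuke.github.GHRepository;
import org.kohsuke.github.GitHub;

public class CommitStatusSketch {
    public static void main(String[] args) throws Exception {
        // Assumed: an OAuth token with repo:status scope in GITHUB_TOKEN.
        // A 404 like the one above is what the API returns when the token
        // cannot access the repository or the commit SHA.
        GitHub gitHub = GitHub.connectUsingOAuth(System.getenv("GITHUB_TOKEN"));
        GHRepository repo = gitHub.getRepository("apache/beam");
        repo.createCommitStatus(
            "60f90c8dcb229c35a82c7be15e64a89678bae058",            // SHA from the log above
            GHCommitState.SUCCESS,                                  // build result being reported
            "https://builds.apache.org/job/beam_PostCommit_Python_Verify/", // target URL (assumed)
            "Build finished.",                                      // description (assumed)
            "beam_PostCommit_Python_Verify");                       // context, as reported in the log
    }
}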