Posted to builds@mesos.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2016/01/23 00:40:29 UTC

Build failed in Jenkins: mesos-reviewbot #10983

See <https://builds.apache.org/job/mesos-reviewbot/10983/>

------------------------------------------
[...truncated 170071 lines...]
rm -f master/*.o
rm -f master/allocator/.dirstamp
rm -f master/*.lo
rm -f master/allocator/mesos/.deps/.dirstamp
rm -rf ../include/mesos/master/.libs ../include/mesos/master/_libs
rm -f master/allocator/*.o
rm -f master/allocator/mesos/.dirstamp
rm -rf ../include/mesos/module/.libs ../include/mesos/module/_libs
rm -f master/allocator/*.lo
rm -f master/allocator/sorter/drf/.deps/.dirstamp
rm -f master/allocator/mesos/*.o
rm -f master/allocator/sorter/drf/.dirstamp
rm -rf ../include/mesos/quota/.libs ../include/mesos/quota/_libs
rm -f master/allocator/mesos/*.lo
rm -rf ../include/mesos/scheduler/.libs ../include/mesos/scheduler/_libs
rm -f master/allocator/sorter/drf/*.o
rm -f messages/.deps/.dirstamp
rm -f master/allocator/sorter/drf/*.lo
rm -f messages/.dirstamp
rm -f messages/*.o
rm -f module/.deps/.dirstamp
rm -f messages/*.lo
rm -rf ../include/mesos/slave/.libs ../include/mesos/slave/_libs
rm -f module/.dirstamp
rm -f module/*.o
rm -f sched/.deps/.dirstamp
rm -f module/*.lo
rm -f sched/.dirstamp
rm -f sched/*.o
rm -f scheduler/.deps/.dirstamp
rm -f sched/*.lo
rm -f scheduler/.dirstamp
rm -rf ../include/mesos/uri/.libs ../include/mesos/uri/_libs
rm -f scheduler/*.o
rm -f slave/.deps/.dirstamp
rm -rf ../include/mesos/v1/.libs ../include/mesos/v1/_libs
rm -f scheduler/*.lo
rm -f slave/.dirstamp
rm -rf ../include/mesos/v1/executor/.libs ../include/mesos/v1/executor/_libs
rm -f slave/*.o
rm -f slave/container_loggers/.deps/.dirstamp
rm -rf ../include/mesos/v1/scheduler/.libs ../include/mesos/v1/scheduler/_libs
rm -f slave/container_loggers/.dirstamp
rm -f slave/*.lo
rm -f slave/containerizer/.deps/.dirstamp
rm -f slave/container_loggers/*.o
rm -f slave/containerizer/.dirstamp
rm -f slave/container_loggers/*.lo
rm -f slave/containerizer/mesos/.deps/.dirstamp
rm -rf authentication/cram_md5/.libs authentication/cram_md5/_libs
rm -f slave/containerizer/*.o
rm -f slave/containerizer/mesos/.dirstamp
rm -f slave/containerizer/*.lo
rm -f slave/containerizer/mesos/isolators/cgroups/.deps/.dirstamp
rm -f slave/containerizer/mesos/*.o
rm -f slave/containerizer/mesos/isolators/cgroups/.dirstamp
rm -f slave/containerizer/mesos/*.lo
rm -f slave/containerizer/mesos/isolators/filesystem/.deps/.dirstamp
rm -rf authentication/http/.libs authentication/http/_libs
rm -f slave/containerizer/mesos/isolators/cgroups/*.o
rm -f slave/containerizer/mesos/isolators/filesystem/.dirstamp
rm -rf authorizer/.libs authorizer/_libs
rm -f slave/containerizer/mesos/isolators/cgroups/*.lo
rm -f slave/containerizer/mesos/isolators/namespaces/.deps/.dirstamp
rm -rf authorizer/local/.libs authorizer/local/_libs
rm -f slave/containerizer/mesos/isolators/namespaces/.dirstamp
rm -f slave/containerizer/mesos/isolators/filesystem/*.o
rm -rf common/.libs common/_libs
rm -f slave/containerizer/mesos/isolators/network/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/filesystem/*.lo
rm -f slave/containerizer/mesos/isolators/network/.dirstamp
rm -f slave/containerizer/mesos/isolators/namespaces/*.o
rm -rf docker/.libs docker/_libs
rm -f slave/containerizer/mesos/isolators/posix/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/namespaces/*.lo
rm -f slave/containerizer/mesos/isolators/posix/.dirstamp
rm -rf examples/.libs examples/_libs
rm -f slave/containerizer/mesos/isolators/network/*.o
rm -f slave/containerizer/mesos/provisioner/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/network/*.lo
rm -f slave/containerizer/mesos/provisioner/.dirstamp
rm -rf exec/.libs exec/_libs
rm -f slave/containerizer/mesos/isolators/posix/*.o
rm -f slave/containerizer/mesos/provisioner/appc/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/posix/*.lo
rm -rf files/.libs files/_libs
rm -f slave/containerizer/mesos/provisioner/appc/.dirstamp
rm -f slave/containerizer/mesos/provisioner/*.o
rm -rf hdfs/.libs hdfs/_libs
rm -f slave/containerizer/mesos/provisioner/backends/.deps/.dirstamp
rm -f slave/containerizer/mesos/provisioner/*.lo
rm -f slave/containerizer/mesos/provisioner/backends/.dirstamp
rm -rf hook/.libs hook/_libs
rm -f slave/containerizer/mesos/provisioner/appc/*.o
rm -rf internal/.libs internal/_libs
rm -f slave/containerizer/mesos/provisioner/docker/.deps/.dirstamp
rm -f slave/containerizer/mesos/provisioner/appc/*.lo
rm -f slave/containerizer/mesos/provisioner/docker/.dirstamp
rm -f slave/containerizer/mesos/provisioner/backends/*.o
rm -rf java/jni/.libs java/jni/_libs
rm -f slave/qos_controllers/.deps/.dirstamp
rm -f slave/containerizer/mesos/provisioner/backends/*.lo
rm -f slave/qos_controllers/.dirstamp
rm -rf jvm/.libs jvm/_libs
rm -f slave/containerizer/mesos/provisioner/docker/*.o
rm -f slave/resource_estimators/.deps/.dirstamp
rm -rf jvm/org/apache/.libs jvm/org/apache/_libs
rm -f slave/containerizer/mesos/provisioner/docker/*.lo
rm -f slave/resource_estimators/.dirstamp
rm -rf linux/.libs linux/_libs
rm -f slave/qos_controllers/*.o
rm -f state/.deps/.dirstamp
rm -f slave/qos_controllers/*.lo
rm -f state/.dirstamp
rm -f slave/resource_estimators/*.o
rm -f tests/.deps/.dirstamp
rm -rf linux/routing/.libs linux/routing/_libs
rm -f slave/resource_estimators/*.lo
rm -f tests/.dirstamp
rm -rf linux/routing/diagnosis/.libs linux/routing/diagnosis/_libs
rm -f state/*.o
rm -f tests/common/.deps/.dirstamp
rm -rf linux/routing/filter/.libs linux/routing/filter/_libs
rm -f state/*.lo
rm -f tests/common/.dirstamp
rm -rf linux/routing/link/.libs linux/routing/link/_libs
rm -f tests/*.o
rm -rf linux/routing/queueing/.libs linux/routing/queueing/_libs
rm -f tests/containerizer/.deps/.dirstamp
rm -rf local/.libs local/_libs
rm -f tests/containerizer/.dirstamp
rm -f uri/.deps/.dirstamp
rm -rf log/.libs log/_libs
rm -f uri/.dirstamp
rm -f uri/fetchers/.deps/.dirstamp
rm -f uri/fetchers/.dirstamp
rm -f usage/.deps/.dirstamp
rm -f usage/.dirstamp
rm -rf log/tool/.libs log/tool/_libs
rm -f v1/.deps/.dirstamp
rm -f v1/.dirstamp
rm -rf logging/.libs logging/_libs
rm -f version/.deps/.dirstamp
rm -rf master/.libs master/_libs
rm -f version/.dirstamp
rm -f watcher/.deps/.dirstamp
rm -f watcher/.dirstamp
rm -f zookeeper/.deps/.dirstamp
rm -f zookeeper/.dirstamp
rm -rf master/allocator/.libs master/allocator/_libs
rm -rf master/allocator/mesos/.libs master/allocator/mesos/_libs
rm -rf master/allocator/sorter/drf/.libs master/allocator/sorter/drf/_libs
rm -rf messages/.libs messages/_libs
rm -rf module/.libs module/_libs
rm -rf sched/.libs sched/_libs
rm -rf scheduler/.libs scheduler/_libs
rm -rf slave/.libs slave/_libs
rm -rf slave/container_loggers/.libs slave/container_loggers/_libs
rm -rf slave/containerizer/.libs slave/containerizer/_libs
rm -rf slave/containerizer/mesos/.libs slave/containerizer/mesos/_libs
rm -rf slave/containerizer/mesos/isolators/cgroups/.libs slave/containerizer/mesos/isolators/cgroups/_libs
rm -rf slave/containerizer/mesos/isolators/filesystem/.libs slave/containerizer/mesos/isolators/filesystem/_libs
rm -rf slave/containerizer/mesos/isolators/namespaces/.libs slave/containerizer/mesos/isolators/namespaces/_libs
rm -f tests/common/*.o
rm -rf slave/containerizer/mesos/isolators/network/.libs slave/containerizer/mesos/isolators/network/_libs
rm -rf slave/containerizer/mesos/isolators/posix/.libs slave/containerizer/mesos/isolators/posix/_libs
rm -f tests/containerizer/*.o
rm -rf slave/containerizer/mesos/provisioner/.libs slave/containerizer/mesos/provisioner/_libs
rm -rf slave/containerizer/mesos/provisioner/appc/.libs slave/containerizer/mesos/provisioner/appc/_libs
rm -f uri/*.o
rm -rf slave/containerizer/mesos/provisioner/backends/.libs slave/containerizer/mesos/provisioner/backends/_libs
rm -f uri/*.lo
rm -f uri/fetchers/*.o
rm -rf slave/containerizer/mesos/provisioner/docker/.libs slave/containerizer/mesos/provisioner/docker/_libs
rm -f uri/fetchers/*.lo
rm -f usage/*.o
rm -f usage/*.lo
rm -f v1/*.o
rm -f v1/*.lo
rm -f version/*.o
rm -f version/*.lo
rm -f watcher/*.o
rm -rf slave/qos_controllers/.libs slave/qos_controllers/_libs
rm -f watcher/*.lo
rm -f zookeeper/*.o
rm -rf slave/resource_estimators/.libs slave/resource_estimators/_libs
rm -f zookeeper/*.lo
rm -rf state/.libs state/_libs
rm -rf uri/.libs uri/_libs
rm -rf uri/fetchers/.libs uri/fetchers/_libs
rm -rf usage/.libs usage/_libs
rm -rf v1/.libs v1/_libs
rm -rf version/.libs version/_libs
rm -rf watcher/.libs watcher/_libs
rm -rf zookeeper/.libs zookeeper/_libs
rm -rf ../include/mesos/.deps ../include/mesos/authentication/.deps ../include/mesos/authorizer/.deps ../include/mesos/containerizer/.deps ../include/mesos/docker/.deps ../include/mesos/executor/.deps ../include/mesos/fetcher/.deps ../include/mesos/maintenance/.deps ../include/mesos/master/.deps ../include/mesos/module/.deps ../include/mesos/quota/.deps ../include/mesos/scheduler/.deps ../include/mesos/slave/.deps ../include/mesos/uri/.deps ../include/mesos/v1/.deps ../include/mesos/v1/executor/.deps ../include/mesos/v1/scheduler/.deps authentication/cram_md5/.deps authentication/http/.deps authorizer/.deps authorizer/local/.deps cli/.deps common/.deps docker/.deps examples/.deps exec/.deps files/.deps hdfs/.deps health-check/.deps hook/.deps internal/.deps java/jni/.deps jvm/.deps jvm/org/apache/.deps launcher/.deps linux/.deps linux/routing/.deps linux/routing/diagnosis/.deps linux/routing/filter/.deps linux/routing/link/.deps linux/routing/queueing/.deps local/.deps log/.deps log/tool/.deps logging/.deps master/.deps master/allocator/.deps master/allocator/mesos/.deps master/allocator/sorter/drf/.deps messages/.deps module/.deps sched/.deps scheduler/.deps slave/.deps slave/container_loggers/.deps slave/containerizer/.deps slave/containerizer/mesos/.deps slave/containerizer/mesos/isolators/cgroups/.deps slave/containerizer/mesos/isolators/filesystem/.deps slave/containerizer/mesos/isolators/namespaces/.deps slave/containerizer/mesos/isolators/network/.deps slave/containerizer/mesos/isolators/posix/.deps slave/containerizer/mesos/provisioner/.deps slave/containerizer/mesos/provisioner/appc/.deps slave/containerizer/mesos/provisioner/backends/.deps slave/containerizer/mesos/provisioner/docker/.deps slave/qos_controllers/.deps slave/resource_estimators/.deps state/.deps tests/.deps tests/common/.deps tests/containerizer/.deps uri/.deps uri/fetchers/.deps usage/.deps v1/.deps version/.deps watcher/.deps zookeeper/.deps
rm -f Makefile
make[2]: Leaving directory `/mesos/mesos-0.27.0/_build/src'
rm -f config.status config.cache config.log configure.lineno config.status.lineno
rm -f Makefile
ERROR: files left in build directory after distclean:
./src/master/replicated_log/CURRENT
./src/master/replicated_log/LOG
./src/master/replicated_log/LOG.old
./src/master/replicated_log/LOCK
./src/master/replicated_log/MANIFEST-000138
./src/master/replicated_log/000143.sst
./src/master/replicated_log/000141.log
./src/credentials
make[1]: *** [distcleancheck] Error 1
make[1]: Leaving directory `/mesos/mesos-0.27.0/_build'
make: *** [distcheck] Error 1
+ docker rmi mesos-1453503460-24287
Untagged: mesos-1453503460-24287:latest
Deleted: cc758a791e525235ee5f4c751cd174f2c7db86e29467dfe052fe1e827332a7f5
Deleted: 9db01a70ebfc506be34bd8f7fe5573de793e88dabded727a5eb3cab23a94f3b4
Deleted: f46a74de2409f891a0dbc428b337ea0d4dfd4de5983e87ff2eb289c2544d8cce
Deleted: 6227e2a902ac3cab3b8722751579c6014dfea57cbf1dd5ce92ba1b5a309f7131
Deleted: e9d87f5773605149a0d2e501607de5f01370e2377347a8a360cdd4314ccd3152
Deleted: b1bae2386dc349a4025cdb1b1e84b28dbaf6adbd00537da99f22d3bed8dae8e6
Deleted: 15ede4ba67dcb28f8ed702c746cff1e055cea66212486b35538535fb1a574219
Deleted: b3a970599df412ffb3ac06170baf33358c5a4a417282090152d011fca49ebce3
Deleted: c27492d1ee4433c05cacd0909d2941a75b4a55858f4b9067da44fb7cfa7c3e6c
Deleted: c6ed0ce6153e82fd3817352aa013807485cd2d8883d454763a174de3463809ff
Deleted: 6e23c98b242af8b1dbfd0d53d0abd451661f291333b5c788af04dbf460bbb47c
Deleted: 96d86ee358d74eed6113c3f6472e716872a2b30bc9a0548e0b557272c5959f7b
Deleted: a4be99f8f9c309620cf71593c36b3e03967906ab7ce0fed144792125c4ae9144
Deleted: 0af80dbd9e6bdd7c5778a06ab7672301f10850b94c1c117b234f9f960b9cc614

Error handling URL https://reviews.apache.org/api/review-requests/42506/reviews/: INTERNAL SERVER ERROR (<!DOCTYPE html>

<html>
 <head>
  <title>500 - Internal Server Error | Review Board</title>
 </head>
 <body>
  <h1>Something broke! (Error 500)</h1>
  <p>
   It appears something broke when you tried to go to here. This is either
   a bug in Review Board or a server configuration error. Please report
   this to your administrator.
  </p>
 </body>
 </html>
)
git clean -fd
git reset --hard 5571132747e26e781ed41eebde6b178d61bc2926

Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : mesos-reviewbot #10986

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/mesos-reviewbot/10986/changes>


Build failed in Jenkins: mesos-reviewbot #10985

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/mesos-reviewbot/10985/>

------------------------------------------
[...truncated 172012 lines...]
I0123 02:06:20.532820 31673 master.cpp:466] Using default 'crammd5' authenticator
I0123 02:06:20.532971 31673 master.cpp:535] Using default 'basic' HTTP authenticator
I0123 02:06:20.533103 31673 master.cpp:569] Authorization enabled
I0123 02:06:20.533372 31672 whitelist_watcher.cpp:77] No whitelist given
I0123 02:06:20.533644 31671 hierarchical.cpp:144] Initialized hierarchical allocator process
I0123 02:06:20.535799 31682 master.cpp:1710] The newly elected leader is master@172.17.0.2:59704 with id 9269c11e-ebb3-4b00-8ada-b14b61b43717
I0123 02:06:20.535837 31682 master.cpp:1723] Elected as the leading master!
I0123 02:06:20.535861 31682 master.cpp:1468] Recovering from registrar
I0123 02:06:20.536193 31682 registrar.cpp:307] Recovering registrar
I0123 02:06:20.549899 31679 leveldb.cpp:304] Persisting metadata (8 bytes) to leveldb took 21.590355ms
I0123 02:06:20.549978 31679 replica.cpp:320] Persisted replica status to STARTING
I0123 02:06:20.550380 31679 recover.cpp:473] Replica is in STARTING status
I0123 02:06:20.551895 31681 replica.cpp:673] Replica in STARTING status received a broadcasted recover request from (14008)@172.17.0.2:59704
I0123 02:06:20.552196 31680 recover.cpp:193] Received a recover response from a replica in STARTING status
I0123 02:06:20.552582 31681 recover.cpp:564] Updating replica status to VOTING
I0123 02:06:20.575002 31680 leveldb.cpp:304] Persisting metadata (8 bytes) to leveldb took 22.205461ms
I0123 02:06:20.575090 31680 replica.cpp:320] Persisted replica status to VOTING
I0123 02:06:20.575361 31681 recover.cpp:578] Successfully joined the Paxos group
I0123 02:06:20.575721 31681 recover.cpp:462] Recover process terminated
I0123 02:06:20.576359 31680 log.cpp:659] Attempting to start the writer
I0123 02:06:20.577942 31672 replica.cpp:493] Replica received implicit promise request from (14009)@172.17.0.2:59704 with proposal 1
I0123 02:06:20.600093 31672 leveldb.cpp:304] Persisting metadata (8 bytes) to leveldb took 22.093818ms
I0123 02:06:20.600189 31672 replica.cpp:342] Persisted promised to 1
I0123 02:06:20.601217 31679 coordinator.cpp:238] Coordinator attempting to fill missing positions
I0123 02:06:20.602694 31674 replica.cpp:388] Replica received explicit promise request from (14010)@172.17.0.2:59704 for position 0 with proposal 2
I0123 02:06:20.625217 31674 leveldb.cpp:341] Persisting action (8 bytes) to leveldb took 22.458557ms
I0123 02:06:20.625299 31674 replica.cpp:712] Persisted action at 0
I0123 02:06:20.626842 31675 replica.cpp:537] Replica received write request for position 0 from (14011)@172.17.0.2:59704
I0123 02:06:20.626920 31675 leveldb.cpp:436] Reading position from leveldb took 37707ns
2016-01-23 02:06:20,641:31652(0x2adcd6793700):ZOO_ERROR@handle_socket_error_msg@1697: Socket [127.0.0.1:60803] zk retcode=-4, errno=111(Connection refused): server refused to accept the client
I0123 02:06:20.650320 31675 leveldb.cpp:341] Persisting action (14 bytes) to leveldb took 23.335152ms
I0123 02:06:20.650403 31675 replica.cpp:712] Persisted action at 0
I0123 02:06:20.651340 31675 replica.cpp:691] Replica received learned notice for position 0 from @0.0.0.0:0
I0123 02:06:20.675443 31675 leveldb.cpp:341] Persisting action (16 bytes) to leveldb took 24.051576ms
I0123 02:06:20.675529 31675 replica.cpp:712] Persisted action at 0
I0123 02:06:20.675561 31675 replica.cpp:697] Replica learned NOP action at position 0
I0123 02:06:20.676744 31683 log.cpp:675] Writer started with ending position 0
I0123 02:06:20.678205 31683 leveldb.cpp:436] Reading position from leveldb took 51209ns
I0123 02:06:20.679483 31686 registrar.cpp:340] Successfully fetched the registry (0B) in 143.240704ms
I0123 02:06:20.679675 31686 registrar.cpp:439] Applied 1 operations in 34522ns; attempting to update the 'registry'
I0123 02:06:20.680625 31678 log.cpp:683] Attempting to append 170 bytes to the log
I0123 02:06:20.680918 31678 coordinator.cpp:348] Coordinator attempting to write APPEND action at position 1
I0123 02:06:20.682092 31678 replica.cpp:537] Replica received write request for position 1 from (14012)@172.17.0.2:59704
I0123 02:06:20.706132 31678 leveldb.cpp:341] Persisting action (189 bytes) to leveldb took 23.86483ms
I0123 02:06:20.706218 31678 replica.cpp:712] Persisted action at 1
I0123 02:06:20.707806 31674 replica.cpp:691] Replica received learned notice for position 1 from @0.0.0.0:0
I0123 02:06:20.731256 31674 leveldb.cpp:341] Persisting action (191 bytes) to leveldb took 23.377305ms
I0123 02:06:20.731356 31674 replica.cpp:712] Persisted action at 1
I0123 02:06:20.731395 31674 replica.cpp:697] Replica learned APPEND action at position 1
I0123 02:06:20.732888 31673 registrar.cpp:484] Successfully updated the 'registry' in 53.079808ms
I0123 02:06:20.733036 31673 registrar.cpp:370] Successfully recovered registrar
I0123 02:06:20.733305 31674 log.cpp:702] Attempting to truncate the log to 1
I0123 02:06:20.733489 31673 master.cpp:1520] Recovered 0 slaves from the Registry (131B) ; allowing 10mins for slaves to re-register
I0123 02:06:20.733578 31673 coordinator.cpp:348] Coordinator attempting to write TRUNCATE action at position 2
I0123 02:06:20.733647 31674 hierarchical.cpp:171] Skipping recovery of hierarchical allocator: nothing to recover
I0123 02:06:20.734589 31673 replica.cpp:537] Replica received write request for position 2 from (14013)@172.17.0.2:59704
I0123 02:06:20.759109 31673 leveldb.cpp:341] Persisting action (16 bytes) to leveldb took 24.456231ms
I0123 02:06:20.759218 31673 replica.cpp:712] Persisted action at 2
I0123 02:06:20.760371 31677 replica.cpp:691] Replica received learned notice for position 2 from @0.0.0.0:0
I0123 02:06:20.784240 31677 leveldb.cpp:341] Persisting action (18 bytes) to leveldb took 23.739032ms
I0123 02:06:20.784487 31677 leveldb.cpp:399] Deleting ~1 keys from leveldb took 68463ns
I0123 02:06:20.784528 31677 replica.cpp:712] Persisted action at 2
I0123 02:06:20.784560 31677 replica.cpp:697] Replica learned TRUNCATE action at position 2
I0123 02:06:20.795238 31652 scheduler.cpp:154] Version: 0.27.0
I0123 02:06:20.796228 31679 scheduler.cpp:236] New master detected at master@172.17.0.2:59704
I0123 02:06:20.797611 31681 scheduler.cpp:298] Sending SUBSCRIBE call to master@172.17.0.2:59704
I0123 02:06:20.799811 31674 process.cpp:3141] Handling HTTP event for process 'master' with path: '/master/api/v1/scheduler'
I0123 02:06:20.800353 31677 http.cpp:503] HTTP POST for /master/api/v1/scheduler from 172.17.0.2:40875
I0123 02:06:20.800606 31677 master.cpp:1972] Received subscription request for HTTP framework 'default'
I0123 02:06:20.800673 31677 master.cpp:1749] Authorizing framework principal 'test-principal' to receive offers for role '*'
I0123 02:06:20.800982 31677 master.cpp:2063] Subscribing framework 'default' with checkpointing disabled and capabilities [  ]
I0123 02:06:20.801522 31676 hierarchical.cpp:265] Added framework 9269c11e-ebb3-4b00-8ada-b14b61b43717-0000
I0123 02:06:20.801599 31675 master.hpp:1658] Sending heartbeat to 9269c11e-ebb3-4b00-8ada-b14b61b43717-0000
I0123 02:06:20.801718 31676 hierarchical.cpp:1355] No resources available to allocate!
I0123 02:06:20.802110 31676 hierarchical.cpp:1450] No inverse offers to send out!
I0123 02:06:20.802413 31676 hierarchical.cpp:1090] Performed allocation for 0 slaves in 741959ns
I0123 02:06:20.802505 31680 scheduler.cpp:457] Enqueuing event SUBSCRIBED received from master@172.17.0.2:59704
I0123 02:06:20.803406 31671 scheduler.cpp:457] Enqueuing event HEARTBEAT received from master@172.17.0.2:59704
I0123 02:06:20.804009 31671 master_maintenance_tests.cpp:177] Ignoring HEARTBEAT event
I0123 02:06:20.804446 31680 scheduler.cpp:298] Sending REQUEST call to master@172.17.0.2:59704
I0123 02:06:20.807059 31679 process.cpp:3141] Handling HTTP event for process 'master' with path: '/master/api/v1/scheduler'
I0123 02:06:20.807595 31679 http.cpp:503] HTTP POST for /master/api/v1/scheduler from 172.17.0.2:40876
I0123 02:06:20.807728 31679 master.cpp:2717] Processing REQUEST call for framework 9269c11e-ebb3-4b00-8ada-b14b61b43717-0000 (default)
I0123 02:06:20.807996 31679 hierarchical.cpp:585] Received resource request from framework 9269c11e-ebb3-4b00-8ada-b14b61b43717-0000
I0123 02:06:20.808382 31652 master.cpp:1025] Master terminating
I0123 02:06:20.808681 31675 hierarchical.cpp:326] Removed framework 9269c11e-ebb3-4b00-8ada-b14b61b43717-0000
E0123 02:06:20.809998 31682 scheduler.cpp:431] End-Of-File received from master. The master closed the event stream
[       OK ] ContentType/SchedulerTest.Request/0 (444 ms)
[ RUN      ] ContentType/SchedulerTest.Request/1
I0123 02:06:20.939666 31652 leveldb.cpp:174] Opened db in 122.986104ms
I0123 02:06:20.988523 31652 leveldb.cpp:181] Compacted db in 48.785639ms
I0123 02:06:20.988615 31652 leveldb.cpp:196] Created db iterator in 24462ns
I0123 02:06:20.988633 31652 leveldb.cpp:202] Seeked to beginning of db in 2905ns
I0123 02:06:20.988646 31652 leveldb.cpp:271] Iterated through 0 keys in the db in 293ns
I0123 02:06:20.988698 31652 replica.cpp:779] Replica recovered with log positions 0 -> 0 with 1 holes and 0 unlearned
I0123 02:06:20.989498 31671 recover.cpp:447] Starting replica recovery
I0123 02:06:20.990113 31671 recover.cpp:473] Replica is in EMPTY status
I0123 02:06:20.991971 31673 replica.cpp:673] Replica in EMPTY status received a broadcasted recover request from (14021)@172.17.0.2:59704
I0123 02:06:20.992553 31673 recover.cpp:193] Received a recover response from a replica in EMPTY status
I0123 02:06:20.994218 31686 recover.cpp:564] Updating replica status to STARTING
I0123 02:06:20.995174 31680 master.cpp:374] Master 2f9d9091-5037-434a-a8a2-dc1bae7ebc49 (f64b5050d06b) started on 172.17.0.2:59704
I0123 02:06:20.995273 31680 master.cpp:376] Flags at startup: --acls="" --allocation_interval="1secs" --allocator="HierarchicalDRF" --authenticate="false" --authenticate_http="true" --authenticate_slaves="true" --authenticators="crammd5" --authorizers="local" --credentials="/tmp/km85f0/credentials" --framework_sorter="drf" --help="false" --hostname_lookup="true" --http_authenticators="basic" --initialize_driver_logging="true" --log_auto_initialize="true" --logbufsecs="0" --logging_level="INFO" --max_completed_frameworks="50" --max_completed_tasks_per_framework="1000" --max_slave_ping_timeouts="5" --quiet="false" --recovery_slave_removal_limit="100%" --registry="replicated_log" --registry_fetch_timeout="1mins" --registry_store_timeout="25secs" --registry_strict="true" --root_submissions="true" --slave_ping_timeout="15secs" --slave_reregister_timeout="10mins" --user_sorter="drf" --version="false" --webui_dir="/mesos/mesos-0.27.0/_inst/share/mesos/webui" --work_dir="/tmp/km85f0/master" --zk_session_timeout="10secs"
I0123 02:06:20.996271 31680 master.cpp:423] Master allowing unauthenticated frameworks to register
I0123 02:06:20.996470 31680 master.cpp:426] Master only allowing authenticated slaves to register
I0123 02:06:20.996670 31680 credentials.hpp:35] Loading credentials for authentication from '/tmp/km85f0/credentials'
I0123 02:06:20.997481 31680 master.cpp:466] Using default 'crammd5' authenticator
I0123 02:06:20.998361 31680 master.cpp:535] Using default 'basic' HTTP authenticator
I0123 02:06:20.998710 31680 master.cpp:569] Authorization enabled
I0123 02:06:20.999145 31675 hierarchical.cpp:144] Initialized hierarchical allocator process
I0123 02:06:20.999239 31675 whitelist_watcher.cpp:77] No whitelist given
I0123 02:06:21.001420 31680 master.cpp:1710] The newly elected leader is master@172.17.0.2:59704 with id 2f9d9091-5037-434a-a8a2-dc1bae7ebc49
I0123 02:06:21.001766 31680 master.cpp:1723] Elected as the leading master!
I0123 02:06:21.002008 31680 master.cpp:1468] Recovering from registrar
I0123 02:06:21.002459 31684 registrar.cpp:307] Recovering registrar
I0123 02:06:21.031728 31686 leveldb.cpp:304] Persisting metadata (8 bytes) to leveldb took 37.1105ms
I0123 02:06:21.031934 31686 replica.cpp:320] Persisted replica status to STARTING
I0123 02:06:21.032517 31686 recover.cpp:473] Replica is in STARTING status
I0123 02:06:21.034843 31686 replica.cpp:673] Replica in STARTING status received a broadcasted recover request from (14023)@172.17.0.2:59704
I0123 02:06:21.035609 31686 recover.cpp:193] Received a recover response from a replica in STARTING status
I0123 02:06:21.036530 31686 recover.cpp:564] Updating replica status to VOTING
I0123 02:06:21.067854 31686 leveldb.cpp:304] Persisting metadata (8 bytes) to leveldb took 30.924506ms
I0123 02:06:21.067939 31686 replica.cpp:320] Persisted replica status to VOTING
I0123 02:06:21.068192 31686 recover.cpp:578] Successfully joined the Paxos group
I0123 02:06:21.068413 31686 recover.cpp:462] Recover process terminated
I0123 02:06:21.069138 31686 log.cpp:659] Attempting to start the writer
I0123 02:06:21.071195 31686 replica.cpp:493] Replica received implicit promise request from (14024)@172.17.0.2:59704 with proposal 1
I0123 02:06:21.090575 31686 leveldb.cpp:304] Persisting metadata (8 bytes) to leveldb took 19.229332ms
I0123 02:06:21.090664 31686 replica.cpp:342] Persisted promised to 1
I0123 02:06:21.091559 31684 coordinator.cpp:238] Coordinator attempting to fill missing positions
I0123 02:06:21.093507 31672 replica.cpp:388] Replica received explicit promise request from (14025)@172.17.0.2:59704 for position 0 with proposal 2
I0123 02:06:21.115363 31672 leveldb.cpp:341] Persisting action (8 bytes) to leveldb took 21.780599ms
I0123 02:06:21.115444 31672 replica.cpp:712] Persisted action at 0
I0123 02:06:21.117086 31672 replica.cpp:537] Replica received write request for position 0 from (14026)@172.17.0.2:59704
I0123 02:06:21.117182 31672 leveldb.cpp:436] Reading position from leveldb took 42343ns
I0123 02:06:21.140470 31672 leveldb.cpp:341] Persisting action (14 bytes) to leveldb took 23.224279ms
I0123 02:06:21.140672 31672 replica.cpp:712] Persisted action at 0
I0123 02:06:21.143831 31671 replica.cpp:691] Replica received learned notice for position 0 from @0.0.0.0:0
I0123 02:06:21.165582 31671 leveldb.cpp:341] Persisting action (16 bytes) to leveldb took 21.591899ms
I0123 02:06:21.165664 31671 replica.cpp:712] Persisted action at 0
I0123 02:06:21.165699 31671 replica.cpp:697] Replica learned NOP action at position 0
I0123 02:06:21.166811 31671 log.cpp:675] Writer started with ending position 0
I0123 02:06:21.168396 31677 leveldb.cpp:436] Reading position from leveldb took 79088ns
I0123 02:06:21.169817 31677 registrar.cpp:340] Successfully fetched the registry (0B) in 167.307776ms
I0123 02:06:21.170058 31677 registrar.cpp:439] Applied 1 operations in 35370ns; attempting to update the 'registry'
I0123 02:06:21.170953 31677 log.cpp:683] Attempting to append 170 bytes to the log
I0123 02:06:21.171257 31674 coordinator.cpp:348] Coordinator attempting to write APPEND action at position 1
I0123 02:06:21.172217 31677 replica.cpp:537] Replica received write request for position 1 from (14027)@172.17.0.2:59704
I0123 02:06:21.190701 31677 leveldb.cpp:341] Persisting action (189 bytes) to leveldb took 18.342802ms
I0123 02:06:21.190783 31677 replica.cpp:712] Persisted action at 1
I0123 02:06:21.192080 31677 replica.cpp:691] Replica received learned notice for position 1 from @0.0.0.0:0
I0123 02:06:21.215868 31677 leveldb.cpp:341] Persisting action (191 bytes) to leveldb took 23.736161ms
I0123 02:06:21.215960 31677 replica.cpp:712] Persisted action at 1
I0123 02:06:21.215996 31677 replica.cpp:697] Replica learned APPEND action at position 1
I0123 02:06:21.218189 31677 registrar.cpp:484] Successfully updated the 'registry' in 48.028928ms
I0123 02:06:21.218374 31677 registrar.cpp:370] Successfully recovered registrar
I0123 02:06:21.218631 31677 log.cpp:702] Attempting to truncate the log to 1
I0123 02:06:21.219348 31674 coordinator.cpp:348] Coordinator attempting to write TRUNCATE action at position 2
I0123 02:06:21.219533 31685 hierarchical.cpp:171] Skipping recovery of hierarchical allocator: nothing to recover
I0123 02:06:21.219291 31677 master.cpp:1520] Recovered 0 slaves from the Registry (131B) ; allowing 10mins for slaves to re-register
I0123 02:06:21.220619 31674 replica.cpp:537] Replica received write request for position 2 from (14028)@172.17.0.2:59704
I0123 02:06:21.241005 31674 leveldb.cpp:341] Persisting action (16 bytes) to leveldb took 20.308078ms
I0123 02:06:21.241107 31674 replica.cpp:712] Persisted action at 2
I0123 02:06:21.242400 31676 replica.cpp:691] Replica received learned notice for position 2 from @0.0.0.0:0
I0123 02:06:21.266158 31676 leveldb.cpp:341] Persisting action (18 bytes) to leveldb took 23.698586ms
I0123 02:06:21.266301 31676 leveldb.cpp:399] Deleting ~1 keys from leveldb took 62561ns
I0123 02:06:21.266332 31676 replica.cpp:712] Persisted action at 2
I0123 02:06:21.266367 31676 replica.cpp:697] Replica learned TRUNCATE action at position 2
I0123 02:06:21.274930 31652 scheduler.cpp:154] Version: 0.27.0
I0123 02:06:21.275671 31676 scheduler.cpp:236] New master detected at master@172.17.0.2:59704
I0123 02:06:21.280503 31676 scheduler.cpp:298] Sending SUBSCRIBE call to master@172.17.0.2:59704
I0123 02:06:21.284109 31683 process.cpp:3141] Handling HTTP event for process 'master' with path: '/master/api/v1/scheduler'
I0123 02:06:21.284819 31683 http.cpp:503] HTTP POST for /master/api/v1/scheduler from 172.17.0.2:40880
I0123 02:06:21.285492 31683 master.cpp:1972] Received subscription request for HTTP framework 'default'
I0123 02:06:21.285640 31683 master.cpp:1749] Authorizing framework principal 'test-principal' to receive offers for role '*'
I0123 02:06:21.286226 31684 master.cpp:2063] Subscribing framework 'default' with checkpointing disabled and capabilities [  ]
I0123 02:06:21.286653 31679 hierarchical.cpp:265] Added framework 2f9d9091-5037-434a-a8a2-dc1bae7ebc49-0000
I0123 02:06:21.286823 31679 hierarchical.cpp:1355] No resources available to allocate!
I0123 02:06:21.286957 31679 hierarchical.cpp:1450] No inverse offers to send out!
I0123 02:06:21.286872 31684 master.hpp:1658] Sending heartbeat to 2f9d9091-5037-434a-a8a2-dc1bae7ebc49-0000
I0123 02:06:21.287348 31679 hierarchical.cpp:1090] Performed allocation for 0 slaves in 574197ns
I0123 02:06:21.289058 31682 scheduler.cpp:457] Enqueuing event SUBSCRIBED received from master@172.17.0.2:59704
I0123 02:06:21.289542 31682 scheduler.cpp:457] Enqueuing event HEARTBEAT received from master@172.17.0.2:59704
I0123 02:06:21.290285 31683 master_maintenance_tests.cpp:177] Ignoring HEARTBEAT event
I0123 02:06:21.291369 31683 scheduler.cpp:298] Sending REQUEST call to master@172.17.0.2:59704
I0123 02:06:21.295339 31673 process.cpp:3141] Handling HTTP event for process 'master' with path: '/master/api/v1/scheduler'
I0123 02:06:21.295876 31673 http.cpp:503] HTTP POST for /master/api/v1/scheduler from 172.17.0.2:40881
I0123 02:06:21.296222 31673 master.cpp:2717] Processing REQUEST call for framework 2f9d9091-5037-434a-a8a2-dc1bae7ebc49-0000 (default)
I0123 02:06:21.296473 31673 hierarchical.cpp:585] Received resource request from framework 2f9d9091-5037-434a-a8a2-dc1bae7ebc49-0000
I0123 02:06:21.297246 31652 master.cpp:1025] Master terminating
I0123 02:06:21.298280 31682 hierarchical.cpp:326] Removed framework 2f9d9091-5037-434a-a8a2-dc1bae7ebc49-0000
E0123 02:06:21.299242 31679 scheduler.cpp:431] End-Of-File received from master. The master closed the event stream
[       OK ] ContentType/SchedulerTest.Request/1 (496 ms)
[----------] 22 tests from ContentType/SchedulerTest (15493 ms total)

[----------] Global test environment tear-down
[==========] 947 tests from 123 test cases ran. (744707 ms total)
[  PASSED  ] 946 tests.
[  FAILED  ] 1 test, listed below:
[  FAILED  ] ProvisionerDockerLocalStoreTest.MetadataManagerInitialization

 1 FAILED TEST
  YOU HAVE 8 DISABLED TESTS

make[4]: *** [check-local] Error 1
make[4]: Leaving directory `/mesos/mesos-0.27.0/_build/src'
make[3]: *** [check-am] Error 2
make[3]: Leaving directory `/mesos/mesos-0.27.0/_build/src'
make[2]: *** [check] Error 2
make[2]: Leaving directory `/mesos/mesos-0.27.0/_build/src'
make[1]: *** [check-recursive] Error 1
make[1]: Leaving directory `/mesos/mesos-0.27.0/_build'
make: *** [distcheck] Error 1
+ docker rmi mesos-1453511765-13537
Untagged: mesos-1453511765-13537:latest
Deleted: 1ff71e3d96e5fdfcd8212e49e1c4e436a27518f483cf386a46d807c7eff7c5ad
Deleted: 4133c28e89c845392588ba8c4aab7b2b12a92e79cf80e4e6b5d2e8410bade87f
Deleted: 1af5a6b2ec92bdab5774dda84ed592858951d376431b215fa90a0d42619535d4
Deleted: d474670552b6771604bd83477f2780608154610b59fcbaa8bc6f6902f0085b8a
Deleted: 2d4a7641599e5bd9e4db6f4f990633b3c550968ea270ef8159313cc92718d4be
Deleted: 3bf0b4551483ef90f5927990ce1fb1f817adf70d236e27f31222e46012755102
Deleted: 7b1bb5f9dadffcaaf51d8bd004eb6763e951ec4251cb816fe8d86bfa60b3e169
Deleted: 5e91c49f768632319e086c90e36df335b2219534b0e2b3af63b741d1e835e539
Deleted: 58aa309c05e3c048f36cf657b2b392125f3d7cad621cd7e6ada15e8f79ed54dd
Deleted: caf23e2757de6d85f4d3eb572e40bf27b608f97e4ed53142d3bc7d1a34a9dfcb
Deleted: 1f0c0bc3c6977a4b8fdd062e1c50dd585de6378b56517eaabd6a4df451fefc01
Deleted: bcc6d25cac4e2afa9cc63cece0a87d1fa8798825686cf481a12b12c13c4b25df
Deleted: a0f182d2e6e32fefb83ede6eef517f37be53673ba421f0a28d023cc8d1c1a625
Deleted: 3b94a7113a05f468d4c8a712e6a1ce115bb5fb1183290e8d32dd94659383d84c

Error handling URL https://reviews.apache.org/api/review-requests/42506/reviews/: INTERNAL SERVER ERROR (<!DOCTYPE html>

<html>
 <head>
  <title>500 - Internal Server Error | Review Board</title>
 </head>
 <body>
  <h1>Something broke! (Error 500)</h1>
  <p>
   It appears something broke when you tried to go to here. This is either
   a bug in Review Board or a server configuration error. Please report
   this to your administrator.
  </p>
 </body>
 </html>
)
git clean -fd
git reset --hard 5571132747e26e781ed41eebde6b178d61bc2926

Build step 'Execute shell' marked build as failure

Build failed in Jenkins: mesos-reviewbot #10984

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/mesos-reviewbot/10984/>

------------------------------------------
[...truncated 170025 lines...]
rm -f logging/*.lo
rm -f master/.dirstamp
rm -rf ../include/mesos/scheduler/.libs ../include/mesos/scheduler/_libs
rm -f master/*.o
rm -f master/allocator/.deps/.dirstamp
rm -rf ../include/mesos/slave/.libs ../include/mesos/slave/_libs
rm -f master/allocator/.dirstamp
rm -f master/*.lo
rm -f master/allocator/mesos/.deps/.dirstamp
rm -f master/allocator/*.o
rm -f master/allocator/mesos/.dirstamp
rm -f master/allocator/*.lo
rm -f master/allocator/sorter/drf/.deps/.dirstamp
rm -f master/allocator/mesos/*.o
rm -f master/allocator/sorter/drf/.dirstamp
rm -f master/allocator/mesos/*.lo
rm -f messages/.deps/.dirstamp
rm -f master/allocator/sorter/drf/*.o
rm -rf ../include/mesos/uri/.libs ../include/mesos/uri/_libs
rm -f messages/.dirstamp
rm -f master/allocator/sorter/drf/*.lo
rm -f module/.deps/.dirstamp
rm -rf ../include/mesos/v1/.libs ../include/mesos/v1/_libs
rm -f messages/*.o
rm -f module/.dirstamp
rm -rf ../include/mesos/v1/executor/.libs ../include/mesos/v1/executor/_libs
rm -f messages/*.lo
rm -f sched/.deps/.dirstamp
rm -rf ../include/mesos/v1/scheduler/.libs ../include/mesos/v1/scheduler/_libs
rm -f sched/.dirstamp
rm -f module/*.o
rm -f module/*.lo
rm -f scheduler/.deps/.dirstamp
rm -rf authentication/cram_md5/.libs authentication/cram_md5/_libs
rm -f sched/*.o
rm -f scheduler/.dirstamp
rm -f sched/*.lo
rm -f slave/.deps/.dirstamp
rm -f scheduler/*.o
rm -f slave/.dirstamp
rm -rf authentication/http/.libs authentication/http/_libs
rm -f scheduler/*.lo
rm -f slave/container_loggers/.deps/.dirstamp
rm -rf authorizer/.libs authorizer/_libs
rm -f slave/*.o
rm -f slave/container_loggers/.dirstamp
rm -f slave/*.lo
rm -f slave/containerizer/.deps/.dirstamp
rm -rf authorizer/local/.libs authorizer/local/_libs
rm -f slave/containerizer/.dirstamp
rm -f slave/container_loggers/*.o
rm -rf common/.libs common/_libs
rm -f slave/containerizer/mesos/.deps/.dirstamp
rm -f slave/container_loggers/*.lo
rm -f slave/containerizer/mesos/.dirstamp
rm -f slave/containerizer/*.o
rm -f slave/containerizer/mesos/isolators/cgroups/.deps/.dirstamp
rm -f slave/containerizer/*.lo
rm -f slave/containerizer/mesos/isolators/cgroups/.dirstamp
rm -f slave/containerizer/mesos/*.o
rm -f slave/containerizer/mesos/isolators/filesystem/.deps/.dirstamp
rm -f slave/containerizer/mesos/*.lo
rm -f slave/containerizer/mesos/isolators/filesystem/.dirstamp
rm -rf docker/.libs docker/_libs
rm -f slave/containerizer/mesos/isolators/namespaces/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/cgroups/*.o
rm -f slave/containerizer/mesos/isolators/namespaces/.dirstamp
rm -f slave/containerizer/mesos/isolators/cgroups/*.lo
rm -rf examples/.libs examples/_libs
rm -f slave/containerizer/mesos/isolators/network/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/filesystem/*.o
rm -f slave/containerizer/mesos/isolators/network/.dirstamp
rm -f slave/containerizer/mesos/isolators/filesystem/*.lo
rm -f slave/containerizer/mesos/isolators/posix/.deps/.dirstamp
rm -rf exec/.libs exec/_libs
rm -f slave/containerizer/mesos/isolators/namespaces/*.o
rm -f slave/containerizer/mesos/isolators/posix/.dirstamp
rm -f slave/containerizer/mesos/isolators/namespaces/*.lo
rm -rf files/.libs files/_libs
rm -f slave/containerizer/mesos/provisioner/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/network/*.o
rm -f slave/containerizer/mesos/isolators/network/*.lo
rm -f slave/containerizer/mesos/provisioner/.dirstamp
rm -rf hdfs/.libs hdfs/_libs
rm -f slave/containerizer/mesos/isolators/posix/*.o
rm -f slave/containerizer/mesos/provisioner/appc/.deps/.dirstamp
rm -f slave/containerizer/mesos/isolators/posix/*.lo
rm -rf hook/.libs hook/_libs
rm -f slave/containerizer/mesos/provisioner/appc/.dirstamp
rm -f slave/containerizer/mesos/provisioner/*.o
rm -rf internal/.libs internal/_libs
rm -f slave/containerizer/mesos/provisioner/*.lo
rm -f slave/containerizer/mesos/provisioner/backends/.deps/.dirstamp
rm -rf java/jni/.libs java/jni/_libs
rm -f slave/containerizer/mesos/provisioner/backends/.dirstamp
rm -f slave/containerizer/mesos/provisioner/appc/*.o
rm -f slave/containerizer/mesos/provisioner/docker/.deps/.dirstamp
rm -f slave/containerizer/mesos/provisioner/appc/*.lo
rm -rf jvm/.libs jvm/_libs
rm -f slave/containerizer/mesos/provisioner/docker/.dirstamp
rm -f slave/containerizer/mesos/provisioner/backends/*.o
rm -rf jvm/org/apache/.libs jvm/org/apache/_libs
rm -f slave/qos_controllers/.deps/.dirstamp
rm -f slave/containerizer/mesos/provisioner/backends/*.lo
rm -f slave/qos_controllers/.dirstamp
rm -rf linux/.libs linux/_libs
rm -f slave/resource_estimators/.deps/.dirstamp
rm -f slave/containerizer/mesos/provisioner/docker/*.o
rm -f slave/resource_estimators/.dirstamp
rm -f slave/containerizer/mesos/provisioner/docker/*.lo
rm -rf linux/routing/.libs linux/routing/_libs
rm -f state/.deps/.dirstamp
rm -f slave/qos_controllers/*.o
rm -rf linux/routing/diagnosis/.libs linux/routing/diagnosis/_libs
rm -f state/.dirstamp
rm -f slave/qos_controllers/*.lo
rm -rf linux/routing/filter/.libs linux/routing/filter/_libs
rm -f tests/.deps/.dirstamp
rm -f slave/resource_estimators/*.o
rm -rf linux/routing/link/.libs linux/routing/link/_libs
rm -f tests/.dirstamp
rm -f slave/resource_estimators/*.lo
rm -rf linux/routing/queueing/.libs linux/routing/queueing/_libs
rm -f tests/common/.deps/.dirstamp
rm -f state/*.o
rm -rf local/.libs local/_libs
rm -f tests/common/.dirstamp
rm -f state/*.lo
rm -f tests/containerizer/.deps/.dirstamp
rm -rf log/.libs log/_libs
rm -f tests/*.o
rm -f tests/containerizer/.dirstamp
rm -f uri/.deps/.dirstamp
rm -f uri/.dirstamp
rm -f uri/fetchers/.deps/.dirstamp
rm -rf log/tool/.libs log/tool/_libs
rm -f uri/fetchers/.dirstamp
rm -f usage/.deps/.dirstamp
rm -rf logging/.libs logging/_libs
rm -f usage/.dirstamp
rm -f v1/.deps/.dirstamp
rm -rf master/.libs master/_libs
rm -f v1/.dirstamp
rm -f version/.deps/.dirstamp
rm -f version/.dirstamp
rm -f watcher/.deps/.dirstamp
rm -f watcher/.dirstamp
rm -f zookeeper/.deps/.dirstamp
rm -f zookeeper/.dirstamp
rm -rf master/allocator/.libs master/allocator/_libs
rm -rf master/allocator/mesos/.libs master/allocator/mesos/_libs
rm -rf master/allocator/sorter/drf/.libs master/allocator/sorter/drf/_libs
rm -rf messages/.libs messages/_libs
rm -rf module/.libs module/_libs
rm -rf sched/.libs sched/_libs
rm -rf scheduler/.libs scheduler/_libs
rm -rf slave/.libs slave/_libs
rm -rf slave/container_loggers/.libs slave/container_loggers/_libs
rm -rf slave/containerizer/.libs slave/containerizer/_libs
rm -rf slave/containerizer/mesos/.libs slave/containerizer/mesos/_libs
rm -rf slave/containerizer/mesos/isolators/cgroups/.libs slave/containerizer/mesos/isolators/cgroups/_libs
rm -rf slave/containerizer/mesos/isolators/filesystem/.libs slave/containerizer/mesos/isolators/filesystem/_libs
rm -rf slave/containerizer/mesos/isolators/namespaces/.libs slave/containerizer/mesos/isolators/namespaces/_libs
rm -rf slave/containerizer/mesos/isolators/network/.libs slave/containerizer/mesos/isolators/network/_libs
rm -rf slave/containerizer/mesos/isolators/posix/.libs slave/containerizer/mesos/isolators/posix/_libs
rm -rf slave/containerizer/mesos/provisioner/.libs slave/containerizer/mesos/provisioner/_libs
rm -rf slave/containerizer/mesos/provisioner/appc/.libs slave/containerizer/mesos/provisioner/appc/_libs
rm -f tests/common/*.o
rm -rf slave/containerizer/mesos/provisioner/backends/.libs slave/containerizer/mesos/provisioner/backends/_libs
rm -f tests/containerizer/*.o
rm -rf slave/containerizer/mesos/provisioner/docker/.libs slave/containerizer/mesos/provisioner/docker/_libs
rm -rf slave/qos_controllers/.libs slave/qos_controllers/_libs
rm -rf slave/resource_estimators/.libs slave/resource_estimators/_libs
rm -rf state/.libs state/_libs
rm -f uri/*.o
rm -f uri/*.lo
rm -f uri/fetchers/*.o
rm -rf uri/.libs uri/_libs
rm -f uri/fetchers/*.lo
rm -rf uri/fetchers/.libs uri/fetchers/_libs
rm -f usage/*.o
rm -f usage/*.lo
rm -f v1/*.o
rm -rf usage/.libs usage/_libs
rm -f v1/*.lo
rm -rf v1/.libs v1/_libs
rm -f version/*.o
rm -rf version/.libs version/_libs
rm -f version/*.lo
rm -f watcher/*.o
rm -rf watcher/.libs watcher/_libs
rm -f watcher/*.lo
rm -rf zookeeper/.libs zookeeper/_libs
rm -f zookeeper/*.o
rm -f zookeeper/*.lo
rm -rf ../include/mesos/.deps ../include/mesos/authentication/.deps ../include/mesos/authorizer/.deps ../include/mesos/containerizer/.deps ../include/mesos/docker/.deps ../include/mesos/executor/.deps ../include/mesos/fetcher/.deps ../include/mesos/maintenance/.deps ../include/mesos/master/.deps ../include/mesos/module/.deps ../include/mesos/quota/.deps ../include/mesos/scheduler/.deps ../include/mesos/slave/.deps ../include/mesos/uri/.deps ../include/mesos/v1/.deps ../include/mesos/v1/executor/.deps ../include/mesos/v1/scheduler/.deps authentication/cram_md5/.deps authentication/http/.deps authorizer/.deps authorizer/local/.deps cli/.deps common/.deps docker/.deps examples/.deps exec/.deps files/.deps hdfs/.deps health-check/.deps hook/.deps internal/.deps java/jni/.deps jvm/.deps jvm/org/apache/.deps launcher/.deps linux/.deps linux/routing/.deps linux/routing/diagnosis/.deps linux/routing/filter/.deps linux/routing/link/.deps linux/routing/queueing/.deps local/.deps log/.deps log/tool/.deps logging/.deps master/.deps master/allocator/.deps master/allocator/mesos/.deps master/allocator/sorter/drf/.deps messages/.deps module/.deps sched/.deps scheduler/.deps slave/.deps slave/container_loggers/.deps slave/containerizer/.deps slave/containerizer/mesos/.deps slave/containerizer/mesos/isolators/cgroups/.deps slave/containerizer/mesos/isolators/filesystem/.deps slave/containerizer/mesos/isolators/namespaces/.deps slave/containerizer/mesos/isolators/network/.deps slave/containerizer/mesos/isolators/posix/.deps slave/containerizer/mesos/provisioner/.deps slave/containerizer/mesos/provisioner/appc/.deps slave/containerizer/mesos/provisioner/backends/.deps slave/containerizer/mesos/provisioner/docker/.deps slave/qos_controllers/.deps slave/resource_estimators/.deps state/.deps tests/.deps tests/common/.deps tests/containerizer/.deps uri/.deps uri/fetchers/.deps usage/.deps v1/.deps version/.deps watcher/.deps zookeeper/.deps
rm -f Makefile
make[2]: Leaving directory `/mesos/mesos-0.27.0/_build/src'
rm -f config.status config.cache config.log configure.lineno config.status.lineno
rm -f Makefile
ERROR: files left in build directory after distclean:
./src/credentials
./src/master/replicated_log/MANIFEST-000138
./src/master/replicated_log/CURRENT
./src/master/replicated_log/LOG
./src/master/replicated_log/000141.log
./src/master/replicated_log/LOCK
./src/master/replicated_log/LOG.old
./src/master/replicated_log/000143.sst
make[1]: Leaving directory `/mesos/mesos-0.27.0/_build'
make[1]: *** [distcleancheck] Error 1
make: *** [distcheck] Error 1
+ docker rmi mesos-1453507449-13763
Untagged: mesos-1453507449-13763:latest
Deleted: 9486b4a9b50210a488560c2c67ba09b4066b0298900f6661d0fe6317163de2f0
Deleted: 32701e06ab110c6329b6b2d6b2b1a8abb27e718f8a92c2a2238909378bf1e5c9
Deleted: f296f6da7bddf05101f3044814a9de60ab6231bcd45dffdf5ae4a8365e909de1
Deleted: 8a4f9136a9436475608b2970a1abac51710def17ff86eb0d3d9ea24ee81f42d6
Deleted: 609f5fb711c8c8d4499bd9896367f0910a552509d9784b8f86f65be08904b4c8
Deleted: 2938064b5ea270b0de739115e68b2627fa3194b8483ff2569d7ac291ba06507b
Deleted: a51fc0d61de2e2e21ce91487322b9cdd904a80a0e228923622929e8ab93ffa83
Deleted: 54d99921b045d025ee79a6ab61108a29bed3e8a2a24d9c9525f8a4c0745598cc
Deleted: d1005ec8a872b7cdd4f6a686eee5f61566bdf88510f038eb5ad4088a57b95307
Deleted: 4748cd6c8a25a9992a05cb7bd4a06c0facaf4110de16cbd25808d04c506d585a
Deleted: c1daf3873a465c45d5569a13c7a2f4b5a8d1ba626e17159ca6d9b71415961f9d
Deleted: c71471d31c389a95400703cfd2a858c1db3b299f5fb6913c67b8a8bcf38c5f47
Deleted: 0411bb61476506e0731d229d3425606c04f836feb3a73fa7b55da0f173a5995a
Deleted: c0c69e2639faf1987739b12d246f4d167acd80cb05d2e199949ae158d31bbbe7

Error handling URL https://reviews.apache.org/api/review-requests/42506/reviews/: INTERNAL SERVER ERROR (<!DOCTYPE html>

<html>
 <head>
  <title>500 - Internal Server Error | Review Board</title>
 </head>
 <body>
  <h1>Something broke! (Error 500)</h1>
  <p>
   It appears something broke when you tried to go to here. This is either
   a bug in Review Board or a server configuration error. Please report
   this to your administrator.
  </p>
 </body>
 </html>
)
git clean -fd
git reset --hard 5571132747e26e781ed41eebde6b178d61bc2926

Build step 'Execute shell' marked build as failure