You are viewing a plain text version of this content. The canonical link for it is here.
Posted to builds@mesos.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2017/02/04 02:58:35 UTC

Build failed in Jenkins: Mesos-Buildbot » autotools,gcc,--verbose --enable-libevent --enable-ssl,GLOG_v=1 MESOS_VERBOSE=1,ubuntu:14.04,(docker||Hadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2) #3218

See <https://builds.apache.org/job/Mesos-Buildbot/BUILDTOOL=autotools,COMPILER=gcc,CONFIGURATION=--verbose%20--enable-libevent%20--enable-ssl,ENVIRONMENT=GLOG_v=1%20MESOS_VERBOSE=1,OS=ubuntu%3A14.04,label_exp=(docker%7C%7CHadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2)/3218/changes>

Changes:

[xujyan] Fix handling of total resources in role and quota role sorters.

[xujyan] Updated the persistent volume test framework to include shared volumes.

[xujyan] Fix to potential dangling pointer in `batch()`.

------------------------------------------
[...truncated 182584 lines...]
I0204 02:57:24.408768 14742 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.409077 14749 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.410765 14743 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.411232 14753 slave.cpp:5422] Finished recovery
I0204 02:57:24.411731 14753 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.412045 14745 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.414446 14755 process.cpp:3697] Handling HTTP event for process 'slave(694)' with path: '/slave(694)/monitor/statistics.json'
I0204 02:57:24.416013 14747 http.cpp:871] Authorizing principal 'test-principal' to GET the '/monitor/statistics.json' endpoint
I0204 02:57:24.421056 14752 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.AuthorizedRequest/1 (34 ms)
[ RUN      ] Endpoint/SlaveEndpointTest.AuthorizedRequest/2
I0204 02:57:24.433151 32237 containerizer.cpp:220] Using isolation: posix/cpu,posix/mem,filesystem/posix,network/cni
W0204 02:57:24.433748 32237 backend.cpp:76] Failed to create 'aufs' backend: AufsBackend requires root privileges
W0204 02:57:24.433873 32237 backend.cpp:76] Failed to create 'bind' backend: BindBackend requires root privileges
I0204 02:57:24.433921 32237 provisioner.cpp:249] Using default backend 'copy'
I0204 02:57:24.437345 14751 slave.cpp:209] Mesos agent started on (695)@172.17.0.2:45507
I0204 02:57:24.437372 14751 slave.cpp:210] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_ozgnsg/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_ozgnsg/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_ozgnsg/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" --recovery_timeout="15mins" --registration_backoff_factor="10ms" 
--resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_ozgnsg" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_nwArc4"
I0204 02:57:24.438310 14751 credentials.hpp:86] Loading credential for authentication from '/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_ozgnsg/credential'
I0204 02:57:24.438519 14751 slave.cpp:352] Agent using credential for: test-principal
I0204 02:57:24.438554 14751 credentials.hpp:37] Loading credentials for authentication from '/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_ozgnsg/http_credentials'
I0204 02:57:24.438936 14751 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 02:57:24.439081 14751 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
I0204 02:57:24.440887 14751 slave.cpp:539] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 02:57:24.440990 14751 slave.cpp:547] Agent attributes: [  ]
I0204 02:57:24.441004 14751 slave.cpp:552] Agent hostname: 5840ebddabc0
I0204 02:57:24.441160 14747 status_update_manager.cpp:177] Pausing sending status updates
I0204 02:57:24.442477 14750 state.cpp:60] Recovering state from '/tmp/Endpoint_SlaveEndpointTest_AuthorizedRequest_2_nwArc4/meta'
I0204 02:57:24.442752 14749 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.443249 14755 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.444757 14746 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.445204 14753 slave.cpp:5422] Finished recovery
I0204 02:57:24.445931 14753 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.446264 14755 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.448395 14753 process.cpp:3697] Handling HTTP event for process 'slave(695)' with path: '/slave(695)/containers'
I0204 02:57:24.449754 14750 http.cpp:871] Authorizing principal 'test-principal' to GET the '/containers' endpoint
I0204 02:57:24.453784 14745 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.AuthorizedRequest/2 (31 ms)
[ RUN      ] Endpoint/SlaveEndpointTest.UnauthorizedRequest/0
I0204 02:57:24.465322 32237 containerizer.cpp:220] Using isolation: posix/cpu,posix/mem,filesystem/posix,network/cni
W0204 02:57:24.465915 32237 backend.cpp:76] Failed to create 'aufs' backend: AufsBackend requires root privileges
W0204 02:57:24.466029 32237 backend.cpp:76] Failed to create 'bind' backend: BindBackend requires root privileges
I0204 02:57:24.466068 32237 provisioner.cpp:249] Using default backend 'copy'
I0204 02:57:24.469806 14749 slave.cpp:209] Mesos agent started on (696)@172.17.0.2:45507
I0204 02:57:24.469832 14749 slave.cpp:210] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_OGQOL3/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_OGQOL3/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_OGQOL3/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" --recovery_timeout="15mins" 
--registration_backoff_factor="10ms" --resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_OGQOL3" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_tFJFqR"
I0204 02:57:24.470487 14749 credentials.hpp:86] Loading credential for authentication from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_OGQOL3/credential'
I0204 02:57:24.470645 14749 slave.cpp:352] Agent using credential for: test-principal
I0204 02:57:24.470671 14749 credentials.hpp:37] Loading credentials for authentication from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_OGQOL3/http_credentials'
I0204 02:57:24.470937 14749 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 02:57:24.471082 14749 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
I0204 02:57:24.472789 14749 slave.cpp:539] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 02:57:24.472874 14749 slave.cpp:547] Agent attributes: [  ]
I0204 02:57:24.472887 14749 slave.cpp:552] Agent hostname: 5840ebddabc0
I0204 02:57:24.473032 14741 status_update_manager.cpp:177] Pausing sending status updates
I0204 02:57:24.474231 14744 state.cpp:60] Recovering state from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_0_tFJFqR/meta'
I0204 02:57:24.474586 14750 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.474918 14746 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.476614 14741 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.477120 14748 slave.cpp:5422] Finished recovery
I0204 02:57:24.477762 14748 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.478076 14741 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.480458 14745 process.cpp:3697] Handling HTTP event for process 'slave(696)' with path: '/slave(696)/monitor/statistics'
I0204 02:57:24.482137 14744 http.cpp:871] Authorizing principal 'test-principal' to GET the '/monitor/statistics' endpoint
I0204 02:57:24.484875 14742 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.UnauthorizedRequest/0 (29 ms)
[ RUN      ] Endpoint/SlaveEndpointTest.UnauthorizedRequest/1
I0204 02:57:24.494992 32237 containerizer.cpp:220] Using isolation: posix/cpu,posix/mem,filesystem/posix,network/cni
W0204 02:57:24.495578 32237 backend.cpp:76] Failed to create 'aufs' backend: AufsBackend requires root privileges
W0204 02:57:24.495692 32237 backend.cpp:76] Failed to create 'bind' backend: BindBackend requires root privileges
I0204 02:57:24.495731 32237 provisioner.cpp:249] Using default backend 'copy'
I0204 02:57:24.499552 14754 slave.cpp:209] Mesos agent started on (697)@172.17.0.2:45507
I0204 02:57:24.499580 14754 slave.cpp:210] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_cMyZoR/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_cMyZoR/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_cMyZoR/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" --recovery_timeout="15mins" 
--registration_backoff_factor="10ms" --resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_cMyZoR" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_JE60YE"
I0204 02:57:24.500371 14754 credentials.hpp:86] Loading credential for authentication from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_cMyZoR/credential'
I0204 02:57:24.500555 14754 slave.cpp:352] Agent using credential for: test-principal
I0204 02:57:24.500581 14754 credentials.hpp:37] Loading credentials for authentication from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_cMyZoR/http_credentials'
I0204 02:57:24.500851 14754 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 02:57:24.501163 14754 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
I0204 02:57:24.502529 14754 slave.cpp:539] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 02:57:24.502617 14754 slave.cpp:547] Agent attributes: [  ]
I0204 02:57:24.502660 14754 slave.cpp:552] Agent hostname: 5840ebddabc0
I0204 02:57:24.502794 14742 status_update_manager.cpp:177] Pausing sending status updates
I0204 02:57:24.504148 14749 state.cpp:60] Recovering state from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_1_JE60YE/meta'
I0204 02:57:24.504539 14750 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.504778 14745 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.506182 14740 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.506628 14751 slave.cpp:5422] Finished recovery
I0204 02:57:24.507128 14751 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.507355 14742 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.509490 14754 process.cpp:3697] Handling HTTP event for process 'slave(697)' with path: '/slave(697)/monitor/statistics.json'
I0204 02:57:24.510869 14754 http.cpp:871] Authorizing principal 'test-principal' to GET the '/monitor/statistics.json' endpoint
I0204 02:57:24.513273 14748 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.UnauthorizedRequest/1 (28 ms)
[ RUN      ] Endpoint/SlaveEndpointTest.UnauthorizedRequest/2
I0204 02:57:24.524503 32237 containerizer.cpp:220] Using isolation: posix/cpu,posix/mem,filesystem/posix,network/cni
W0204 02:57:24.525123 32237 backend.cpp:76] Failed to create 'aufs' backend: AufsBackend requires root privileges
W0204 02:57:24.525239 32237 backend.cpp:76] Failed to create 'bind' backend: BindBackend requires root privileges
I0204 02:57:24.525290 32237 provisioner.cpp:249] Using default backend 'copy'
I0204 02:57:24.528867 14747 slave.cpp:209] Mesos agent started on (698)@172.17.0.2:45507
I0204 02:57:24.528976 14747 slave.cpp:210] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_e0u2kF/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_e0u2kF/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_e0u2kF/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" --recovery_timeout="15mins" 
--registration_backoff_factor="10ms" --resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_e0u2kF" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_7JBlQs"
I0204 02:57:24.529614 14747 credentials.hpp:86] Loading credential for authentication from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_e0u2kF/credential'
I0204 02:57:24.529790 14747 slave.cpp:352] Agent using credential for: test-principal
I0204 02:57:24.529822 14747 credentials.hpp:37] Loading credentials for authentication from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_e0u2kF/http_credentials'
I0204 02:57:24.530119 14747 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 02:57:24.530292 14747 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
I0204 02:57:24.531893 14747 slave.cpp:539] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 02:57:24.532025 14747 slave.cpp:547] Agent attributes: [  ]
I0204 02:57:24.532052 14747 slave.cpp:552] Agent hostname: 5840ebddabc0
I0204 02:57:24.532208 14748 status_update_manager.cpp:177] Pausing sending status updates
I0204 02:57:24.533594 14750 state.cpp:60] Recovering state from '/tmp/Endpoint_SlaveEndpointTest_UnauthorizedRequest_2_7JBlQs/meta'
I0204 02:57:24.533972 14741 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.534198 14743 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.535799 14752 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.536332 14742 slave.cpp:5422] Finished recovery
I0204 02:57:24.536875 14742 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.537209 14749 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.539369 14750 process.cpp:3697] Handling HTTP event for process 'slave(698)' with path: '/slave(698)/containers'
I0204 02:57:24.540818 14742 http.cpp:871] Authorizing principal 'test-principal' to GET the '/containers' endpoint
I0204 02:57:24.542990 14748 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.UnauthorizedRequest/2 (29 ms)
[ RUN      ] Endpoint/SlaveEndpointTest.NoAuthorizer/0
I0204 02:57:24.553555 32237 containerizer.cpp:220] Using isolation: posix/cpu,posix/mem,filesystem/posix,network/cni
W0204 02:57:24.554147 32237 backend.cpp:76] Failed to create 'aufs' backend: AufsBackend requires root privileges
W0204 02:57:24.554275 32237 backend.cpp:76] Failed to create 'bind' backend: BindBackend requires root privileges
I0204 02:57:24.554325 32237 provisioner.cpp:249] Using default backend 'copy'
I0204 02:57:24.556068 32237 cluster.cpp:446] Creating default 'local' authorizer
I0204 02:57:24.557422 14746 slave.cpp:209] Mesos agent started on (699)@172.17.0.2:45507
I0204 02:57:24.557445 14746 slave.cpp:210] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_oZE9zt/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_oZE9zt/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_oZE9zt/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" --recovery_timeout="15mins" --registration_backoff_factor="10ms" 
--resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_oZE9zt" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_PHZI0g"
I0204 02:57:24.557976 14746 credentials.hpp:86] Loading credential for authentication from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_oZE9zt/credential'
I0204 02:57:24.558095 14746 slave.cpp:352] Agent using credential for: test-principal
I0204 02:57:24.558115 14746 credentials.hpp:37] Loading credentials for authentication from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_oZE9zt/http_credentials'
I0204 02:57:24.558300 14746 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 02:57:24.558418 14746 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
I0204 02:57:24.559489 14746 slave.cpp:539] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 02:57:24.559561 14746 slave.cpp:547] Agent attributes: [  ]
I0204 02:57:24.559573 14746 slave.cpp:552] Agent hostname: 5840ebddabc0
I0204 02:57:24.559686 14745 status_update_manager.cpp:177] Pausing sending status updates
I0204 02:57:24.560973 14755 state.cpp:60] Recovering state from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_0_PHZI0g/meta'
I0204 02:57:24.561414 14748 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.561640 14749 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.562883 14750 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.563289 14753 slave.cpp:5422] Finished recovery
I0204 02:57:24.563695 14753 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.563892 14755 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.565608 14741 process.cpp:3697] Handling HTTP event for process 'slave(699)' with path: '/slave(699)/monitor/statistics'
I0204 02:57:24.566718 14752 http.cpp:871] Authorizing principal 'test-principal' to GET the '/monitor/statistics' endpoint
I0204 02:57:24.569921 14750 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.NoAuthorizer/0 (27 ms)
[ RUN      ] Endpoint/SlaveEndpointTest.NoAuthorizer/1
I0204 02:57:24.580909 32237 containerizer.cpp:220] Using isolation: posix/cpu,posix/mem,filesystem/posix,network/cni
W0204 02:57:24.581362 32237 backend.cpp:76] Failed to create 'aufs' backend: AufsBackend requires root privileges
W0204 02:57:24.581446 32237 backend.cpp:76] Failed to create 'bind' backend: BindBackend requires root privileges
I0204 02:57:24.581475 32237 provisioner.cpp:249] Using default backend 'copy'
I0204 02:57:24.582710 32237 cluster.cpp:446] Creating default 'local' authorizer
I0204 02:57:24.584034 14749 slave.cpp:209] Mesos agent started on (700)@172.17.0.2:45507
I0204 02:57:24.584056 14749 slave.cpp:210] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_oN126h/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_oN126h/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_oN126h/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" --recovery_timeout="15mins" --registration_backoff_factor="10ms" 
--resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_oN126h" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_1pZ8s5"
I0204 02:57:24.584537 14749 credentials.hpp:86] Loading credential for authentication from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_oN126h/credential'
I0204 02:57:24.584658 14749 slave.cpp:352] Agent using credential for: test-principal
I0204 02:57:24.584678 14749 credentials.hpp:37] Loading credentials for authentication from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_oN126h/http_credentials'
I0204 02:57:24.584861 14749 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 02:57:24.584980 14749 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
I0204 02:57:24.586107 14749 slave.cpp:539] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 02:57:24.586180 14749 slave.cpp:547] Agent attributes: [  ]
I0204 02:57:24.586194 14749 slave.cpp:552] Agent hostname: 5840ebddabc0
I0204 02:57:24.586288 14741 status_update_manager.cpp:177] Pausing sending status updates
I0204 02:57:24.587267 14746 state.cpp:60] Recovering state from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_1_1pZ8s5/meta'
I0204 02:57:24.587473 14740 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.587666 14747 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.588898 14754 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.589248 14750 slave.cpp:5422] Finished recovery
I0204 02:57:24.589689 14750 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.589931 14741 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.591886 14748 process.cpp:3697] Handling HTTP event for process 'slave(700)' with path: '/slave(700)/monitor/statistics.json'
I0204 02:57:24.593678 14746 http.cpp:871] Authorizing principal 'test-principal' to GET the '/monitor/statistics.json' endpoint
I0204 02:57:24.597797 14748 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.NoAuthorizer/1 (28 ms)
[ RUN      ] Endpoint/SlaveEndpointTest.NoAuthorizer/2
I0204 02:57:24.609025 32237 containerizer.cpp:220] Using isolation: posix/cpu,posix/mem,filesystem/posix,network/cni
W0204 02:57:24.609611 32237 backend.cpp:76] Failed to create 'aufs' backend: AufsBackend requires root privileges
W0204 02:57:24.609725 32237 backend.cpp:76] Failed to create 'bind' backend: BindBackend requires root privileges
I0204 02:57:24.609763 32237 provisioner.cpp:249] Using default backend 'copy'
I0204 02:57:24.611410 32237 cluster.cpp:446] Creating default 'local' authorizer
I0204 02:57:24.613289 14741 slave.cpp:209] Mesos agent started on (701)@172.17.0.2:45507
I0204 02:57:24.613318 14741 slave.cpp:210] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_RHGOV6/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_RHGOV6/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_RHGOV6/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" --recovery_timeout="15mins" --registration_backoff_factor="10ms" 
--resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_RHGOV6" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_qRMmdU"
I0204 02:57:24.614037 14741 credentials.hpp:86] Loading credential for authentication from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_RHGOV6/credential'
I0204 02:57:24.614197 14741 slave.cpp:352] Agent using credential for: test-principal
I0204 02:57:24.614223 14741 credentials.hpp:37] Loading credentials for authentication from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_RHGOV6/http_credentials'
I0204 02:57:24.614478 14741 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 02:57:24.614634 14741 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
I0204 02:57:24.616614 14741 slave.cpp:539] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 02:57:24.616719 14741 slave.cpp:547] Agent attributes: [  ]
I0204 02:57:24.616734 14741 slave.cpp:552] Agent hostname: 5840ebddabc0
I0204 02:57:24.616969 14755 status_update_manager.cpp:177] Pausing sending status updates
I0204 02:57:24.618190 14744 state.cpp:60] Recovering state from '/tmp/Endpoint_SlaveEndpointTest_NoAuthorizer_2_qRMmdU/meta'
I0204 02:57:24.618527 14741 status_update_manager.cpp:203] Recovering status update manager
I0204 02:57:24.618839 14740 containerizer.cpp:599] Recovering containerizer
I0204 02:57:24.620431 14748 provisioner.cpp:410] Provisioner recovery complete
I0204 02:57:24.620893 14753 slave.cpp:5422] Finished recovery
I0204 02:57:24.621654 14753 slave.cpp:5596] Querying resource estimator for oversubscribable resources
I0204 02:57:24.622025 14753 slave.cpp:5610] Received oversubscribable resources {} from the resource estimator
I0204 02:57:24.624191 14746 process.cpp:3697] Handling HTTP event for process 'slave(701)' with path: '/slave(701)/containers'
I0204 02:57:24.625519 14747 http.cpp:871] Authorizing principal 'test-principal' to GET the '/containers' endpoint
I0204 02:57:24.629582 32237 slave.cpp:801] Agent terminating
[       OK ] Endpoint/SlaveEndpointTest.NoAuthorizer/2 (32 ms)
[----------] 9 tests from Endpoint/SlaveEndpointTest (273 ms total)

[----------] Global test environment tear-down
[==========] 1480 tests from 166 test cases ran. (486093 ms total)
[  PASSED  ] 1479 tests.
[  FAILED  ] 1 test, listed below:
[  FAILED  ] MasterTest.AgentRestartNoReregister

 1 FAILED TEST
  YOU HAVE 14 DISABLED TESTS

make[4]: *** [check-local] Error 1
make[4]: Leaving directory `/mesos/mesos-1.2.0/_build/src'
make[3]: *** [check-am] Error 2
make[3]: Leaving directory `/mesos/mesos-1.2.0/_build/src'
make[2]: *** [check] Error 2
make[2]: Leaving directory `/mesos/mesos-1.2.0/_build/src'
make[1]: *** [check-recursive] Error 1
make[1]: Leaving directory `/mesos/mesos-1.2.0/_build'
make: *** [distcheck] Error 1
+ docker rmi mesos-1486173990-8585
Untagged: mesos-1486173990-8585:latest
Deleted: sha256:348cca710e0ff3cd4b50d368ed8d4099b670a796c726bf7a6c47be57a35598f6
Deleted: sha256:61504977f51b4e7d6fac2f5a0366ada59c0f39c88b2b1d86427a08b75f2dadc8
Deleted: sha256:36e5adc594871b99301f754f1a75a82c8f027fce43b351e13dae512a2e2fd34f
Deleted: sha256:b870b0ab324b5b67702fefd8adff3ebce5c207c17d67525ba6c38760cc54bbbd
Deleted: sha256:8fdbcf682043ce2a960fa9e602e1f02d4db40f0a4c98c63137a7a074665c13d5
Deleted: sha256:69730ff1ac0bf5988272c0ffcf3a467c9fec9f0d859755fe98778525a7ede35d
Deleted: sha256:3b59952c17263d68c6c382a5e8e53219525040a642139f8147f550af46ddc58b
Deleted: sha256:cd8537846bffaf4e6ba4d1493059276c6b2b973369714c494e71e3d2b74050e3
Deleted: sha256:2a75a21c4000af4886447ed2028fcf13f58c6fb36b703f436a9818df403ca369
Deleted: sha256:14b8e2da428730823c8ef2938a08ad98fc08ae9740098fd09bd8f40d740d72d0
Deleted: sha256:549ac37d2c43a0ea4b026c180a2a4e12c1f157822aa62dc521e0474d176de646
Deleted: sha256:6933d8b4922773ab6dbbcb57a1f0d73059340bb1b7f09b2acb4e5b7f3ab0a398
Deleted: sha256:393f099d6d60244d19c814886daa09ae15257868f4b1702d72aa9e2c0dce59e4
Deleted: sha256:f10177c9ae6fa93188d924dd20d003e8709af68096e92bf400711a6c8a9c6516
Deleted: sha256:7eb839e2be539aa26d93e2544af4ba0f0f7d2ff1f19500c55230fc90a75430e7
Deleted: sha256:94583be5b77cd063721bbda00476a442f311c9a0b19b79b51c8ec882aaab98f3
Deleted: sha256:31e37a97820da00a3dcc9be543490d7160b1e15805ccdbcc58bc4098d6711105
Deleted: sha256:c72209bc0368a1c5a7b6b6569f2824e859ca07d095926415dc595360528aa1cf
Deleted: sha256:1c9f71800e2cd20c13a0f67ab76d02726234e96768746c35b6f5f0cdc3d8ce41
Deleted: sha256:34bf65bbbe29de8e19e1d2ec63deba7ad02580ff89ddcccc74b57d1103dbe5b1
Deleted: sha256:50990effc6f224f1868254079982be88300c01081414d55729ce54e2625cca33
Deleted: sha256:75eb3646758a666d8210c34ecd52e9eea9d8d8db2018216d80329b0e92b1e06a
Deleted: sha256:7b0dd43327e74dcd4e6b4294875bc98835ff39ac3606ae1a6f5e93603115f1bf
Deleted: sha256:a52e8217407d2a95cbf9546eb047ed22499d32fc5ef2e38dde3593e36ffcbc39
Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : Mesos-Buildbot » autotools,gcc,--verbose --enable-libevent --enable-ssl,GLOG_v=1 MESOS_VERBOSE=1,ubuntu:14.04,(docker||Hadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2) #3221

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Mesos-Buildbot/BUILDTOOL=autotools,COMPILER=gcc,CONFIGURATION=--verbose%20--enable-libevent%20--enable-ssl,ENVIRONMENT=GLOG_v=1%20MESOS_VERBOSE=1,OS=ubuntu%3A14.04,label_exp=(docker%7C%7CHadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2)/3221/changes>


Build failed in Jenkins: Mesos-Buildbot » autotools,gcc,--verbose --enable-libevent --enable-ssl,GLOG_v=1 MESOS_VERBOSE=1,ubuntu:14.04,(docker||Hadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2) #3220

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Mesos-Buildbot/BUILDTOOL=autotools,COMPILER=gcc,CONFIGURATION=--verbose%20--enable-libevent%20--enable-ssl,ENVIRONMENT=GLOG_v=1%20MESOS_VERBOSE=1,OS=ubuntu%3A14.04,label_exp=(docker%7C%7CHadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2)/3220/>

------------------------------------------
[...truncated 254 lines...]
Receiving objects:  71% (79470/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  72% (80589/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  73% (81709/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  74% (82828/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  75% (83947/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  76% (85067/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  77% (86186/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  78% (87305/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  79% (88424/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  80% (89544/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  81% (90663/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  82% (91782/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  83% (92902/111929), 71.26 MiB | 23.71 MiB/s   
Receiving objects:  84% (94021/111929), 88.49 MiB | 25.23 MiB/s   
Receiving objects:  85% (95140/111929), 88.49 MiB | 25.23 MiB/s   
Receiving objects:  85% (95568/111929), 105.96 MiB | 26.44 MiB/s   
Receiving objects:  86% (96259/111929), 105.96 MiB | 26.44 MiB/s   
Receiving objects:  87% (97379/111929), 105.96 MiB | 26.44 MiB/s   
Receiving objects:  88% (98498/111929), 123.54 MiB | 27.40 MiB/s   
Receiving objects:  89% (99617/111929), 123.54 MiB | 27.40 MiB/s   
Receiving objects:  89% (100027/111929), 141.11 MiB | 29.43 MiB/s   
Receiving objects:  89% (100027/111929), 176.07 MiB | 29.57 MiB/s   
error: RPC failed; result=56, HTTP code = 200
fatal: The remote end hung up unexpectedly
fatal: early EOF
fatal: index-pack failed

	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1723)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:1459)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.access$300(CliGitAPIImpl.java:63)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:314)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$2.execute(CliGitAPIImpl.java:506)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:152)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:145)
	at hudson.remoting.UserRequest.perform(UserRequest.java:153)
	at hudson.remoting.UserRequest.perform(UserRequest.java:50)
	at hudson.remoting.Request$2.run(Request.java:332)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:68)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
	at ......remote call to H6(Native Method)
	at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1416)
	at hudson.remoting.UserResponse.retrieve(UserRequest.java:253)
	at hudson.remoting.Channel.call(Channel.java:781)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.execute(RemoteGitImpl.java:145)
	at sun.reflect.GeneratedMethodAccessor946.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.invoke(RemoteGitImpl.java:131)
	at com.sun.proxy.$Proxy177.execute(Unknown Source)
	at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1046)
	at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1086)
	at hudson.scm.SCM.checkout(SCM.java:485)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1269)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:604)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:529)
	at hudson.model.Run.execute(Run.java:1741)
	at hudson.matrix.MatrixRun.run(MatrixRun.java:146)
	at hudson.model.ResourceController.execute(ResourceController.java:98)
	at hudson.model.Executor.run(Executor.java:410)
ERROR: null
Retrying after 10 seconds
Wiping out workspace first.
Cloning the remote Git repository
Cloning repository https://git-wip-us.apache.org/repos/asf/mesos.git
 > git init <https://builds.apache.org/job/Mesos-Buildbot/BUILDTOOL=autotools,COMPILER=gcc,CONFIGURATION=--verbose%20--enable-libevent%20--enable-ssl,ENVIRONMENT=GLOG_v=1%20MESOS_VERBOSE=1,OS=ubuntu%3A14.04,label_exp=(docker%7C%7CHadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2)/ws/> # timeout=10
Fetching upstream changes from https://git-wip-us.apache.org/repos/asf/mesos.git
 > git --version # timeout=10
 > git -c core.askpass=true fetch --tags --progress https://git-wip-us.apache.org/repos/asf/mesos.git +refs/heads/*:refs/remotes/origin/*
ERROR: Error cloning remote repo 'origin'
hudson.plugins.git.GitException: Command "git -c core.askpass=true fetch --tags --progress https://git-wip-us.apache.org/repos/asf/mesos.git +refs/heads/*:refs/remotes/origin/*" returned status code 128:
stdout: 
stderr: remote: Counting objects: 111929, done.        
remote: Compressing objects:   0% (1/30772)           
remote: Compressing objects:   1% (308/30772)           
remote: Compressing objects:   2% (616/30772)           
remote: Compressing objects:   3% (924/30772)           
remote: Compressing objects:   4% (1231/30772)           
remote: Compressing objects:   5% (1539/30772)           
remote: Compressing objects:   6% (1847/30772)           
remote: Compressing objects:   7% (2155/30772)           
remote: Compressing objects:   8% (2462/30772)           
remote: Compressing objects:   9% (2770/30772)           
remote: Compressing objects:  10% (3078/30772)           
remote: Compressing objects:  11% (3385/30772)           
remote: Compressing objects:  12% (3693/30772)           
remote: Compressing objects:  13% (4001/30772)           
remote: Compressing objects:  14% (4309/30772)           
remote: Compressing objects:  15% (4616/30772)           
remote: Compressing objects:  16% (4924/30772)           
remote: Compressing objects:  17% (5232/30772)           
remote: Compressing objects:  18% (5539/30772)           
remote: Compressing objects:  19% (5847/30772)           
remote: Compressing objects:  20% (6155/30772)           
remote: Compressing objects:  21% (6463/30772)           
remote: Compressing objects:  22% (6770/30772)           
remote: Compressing objects:  23% (7078/30772)           
remote: Compressing objects:  24% (7386/30772)           
remote: Compressing objects:  25% (7693/30772)           
remote: Compressing objects:  26% (8001/30772)           
remote: Compressing objects:  27% (8309/30772)           
remote: Compressing objects:  28% (8617/30772)           
remote: Compressing objects:  29% (8924/30772)           
remote: Compressing objects:  30% (9232/30772)           
remote: Compressing objects:  31% (9540/30772)           
remote: Compressing objects:  32% (9848/30772)           
remote: Compressing objects:  33% (10155/30772)           
remote: Compressing objects:  34% (10463/30772)           
remote: Compressing objects:  35% (10771/30772)           
remote: Compressing objects:  36% (11078/30772)           
remote: Compressing objects:  37% (11386/30772)           
remote: Compressing objects:  38% (11694/30772)           
remote: Compressing objects:  39% (12002/30772)           
remote: Compressing objects:  40% (12309/30772)           
remote: Compressing objects:  41% (12617/30772)           
remote: Compressing objects:  42% (12925/30772)           
remote: Compressing objects:  43% (13232/30772)           
remote: Compressing objects:  44% (13540/30772)           
remote: Compressing objects:  45% (13848/30772)           
remote: Compressing objects:  46% (14156/30772)           
remote: Compressing objects:  47% (14463/30772)           
remote: Compressing objects:  48% (14771/30772)           
remote: Compressing objects:  49% (15079/30772)           
remote: Compressing objects:  50% (15386/30772)           
remote: Compressing objects:  51% (15694/30772)           
remote: Compressing objects:  52% (16002/30772)           
remote: Compressing objects:  53% (16310/30772)           
remote: Compressing objects:  54% (16617/30772)           
remote: Compressing objects:  55% (16925/30772)           
remote: Compressing objects:  56% (17233/30772)           
remote: Compressing objects:  57% (17541/30772)           
remote: Compressing objects:  58% (17848/30772)           
remote: Compressing objects:  59% (18156/30772)           
remote: Compressing objects:  60% (18464/30772)           
remote: Compressing objects:  61% (18771/30772)           
remote: Compressing objects:  62% (19079/30772)           
remote: Compressing objects:  63% (19387/30772)           
remote: Compressing objects:  64% (19695/30772)           
remote: Compressing objects:  65% (20002/30772)           
remote: Compressing objects:  66% (20310/30772)           
remote: Compressing objects:  67% (20618/30772)           
remote: Compressing objects:  68% (20925/30772)           
remote: Compressing objects:  69% (21233/30772)           
remote: Compressing objects:  70% (21541/30772)           
remote: Compressing objects:  71% (21849/30772)           
remote: Compressing objects:  72% (22156/30772)           
remote: Compressing objects:  73% (22464/30772)           
remote: Compressing objects:  74% (22772/30772)           
remote: Compressing objects:  75% (23079/30772)           
remote: Compressing objects:  76% (23387/30772)           
remote: Compressing objects:  77% (23695/30772)           
remote: Compressing objects:  78% (24003/30772)           
remote: Compressing objects:  79% (24310/30772)           
remote: Compressing objects:  80% (24618/30772)           
remote: Compressing objects:  81% (24926/30772)           
remote: Compressing objects:  82% (25234/30772)           
remote: Compressing objects:  83% (25541/30772)           
remote: Compressing objects:  84% (25849/30772)           
remote: Compressing objects:  85% (26157/30772)           
remote: Compressing objects:  86% (26464/30772)           
remote: Compressing objects:  87% (26772/30772)           
remote: Compressing objects:  88% (27080/30772)           
remote: Compressing objects:  89% (27388/30772)           
remote: Compressing objects:  90% (27695/30772)           
remote: Compressing objects:  91% (28003/30772)           
remote: Compressing objects:  92% (28311/30772)           
remote: Compressing objects:  93% (28618/30772)           
remote: Compressing objects:  94% (28926/30772)           
remote: Compressing objects:  95% (29234/30772)           
remote: Compressing objects:  96% (29542/30772)           
remote: Compressing objects:  97% (29849/30772)           
remote: Compressing objects:  98% (30157/30772)           
remote: Compressing objects:  99% (30465/30772)           
remote: Compressing objects: 100% (30772/30772)           
remote: Compressing objects: 100% (30772/30772), done.        
Receiving objects:   0% (1/111929)   
Receiving objects:   1% (1120/111929)   
Receiving objects:   2% (2239/111929)   
Receiving objects:   3% (3358/111929)   
Receiving objects:   4% (4478/111929)   
Receiving objects:   5% (5597/111929)   
Receiving objects:   6% (6716/111929)   
Receiving objects:   7% (7836/111929)   
Receiving objects:   8% (8955/111929)   
Receiving objects:   9% (10074/111929)   
Receiving objects:  10% (11193/111929)   
Receiving objects:  11% (12313/111929)   
Receiving objects:  12% (13432/111929)   
Receiving objects:  13% (14551/111929)   
Receiving objects:  14% (15671/111929)   
Receiving objects:  15% (16790/111929)   
Receiving objects:  16% (17909/111929)   
Receiving objects:  17% (19028/111929)   
Receiving objects:  18% (20148/111929)   
Receiving objects:  19% (21267/111929)   
Receiving objects:  20% (22386/111929)   
Receiving objects:  21% (23506/111929)   
Receiving objects:  22% (24625/111929), 7.97 MiB | 15.91 MiB/s   
Receiving objects:  23% (25744/111929), 7.97 MiB | 15.91 MiB/s   
Receiving objects:  24% (26863/111929), 7.97 MiB | 15.91 MiB/s   
Receiving objects:  25% (27983/111929), 7.97 MiB | 15.91 MiB/s   
Receiving objects:  26% (29102/111929), 7.97 MiB | 15.91 MiB/s   
Receiving objects:  26% (29961/111929), 23.86 MiB | 23.74 MiB/s   
error: RPC failed; result=56, HTTP code = 200
fatal: The remote end hung up unexpectedly
fatal: early EOF
fatal: index-pack failed

	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandIn(CliGitAPIImpl.java:1723)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.launchCommandWithCredentials(CliGitAPIImpl.java:1459)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl.access$300(CliGitAPIImpl.java:63)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$1.execute(CliGitAPIImpl.java:314)
	at org.jenkinsci.plugins.gitclient.CliGitAPIImpl$2.execute(CliGitAPIImpl.java:506)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:152)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler$1.call(RemoteGitImpl.java:145)
	at hudson.remoting.UserRequest.perform(UserRequest.java:153)
	at hudson.remoting.UserRequest.perform(UserRequest.java:50)
	at hudson.remoting.Request$2.run(Request.java:332)
	at hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:68)
	at java.util.concurrent.FutureTask.run(FutureTask.java:262)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
	at ......remote call to H6(Native Method)
	at hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1416)
	at hudson.remoting.UserResponse.retrieve(UserRequest.java:253)
	at hudson.remoting.Channel.call(Channel.java:781)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.execute(RemoteGitImpl.java:145)
	at sun.reflect.GeneratedMethodAccessor946.invoke(Unknown Source)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.jenkinsci.plugins.gitclient.RemoteGitImpl$CommandInvocationHandler.invoke(RemoteGitImpl.java:131)
	at com.sun.proxy.$Proxy177.execute(Unknown Source)
	at hudson.plugins.git.GitSCM.retrieveChanges(GitSCM.java:1046)
	at hudson.plugins.git.GitSCM.checkout(GitSCM.java:1086)
	at hudson.scm.SCM.checkout(SCM.java:485)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1269)
	at hudson.model.AbstractBuild$AbstractBuildExecution.defaultCheckout(AbstractBuild.java:604)
	at jenkins.scm.SCMCheckoutStrategy.checkout(SCMCheckoutStrategy.java:86)
	at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:529)
	at hudson.model.Run.execute(Run.java:1741)
	at hudson.matrix.MatrixRun.run(MatrixRun.java:146)
	at hudson.model.ResourceController.execute(ResourceController.java:98)
	at hudson.model.Executor.run(Executor.java:410)
ERROR: null


Build failed in Jenkins: Mesos-Buildbot » autotools,gcc,--verbose --enable-libevent --enable-ssl,GLOG_v=1 MESOS_VERBOSE=1,ubuntu:14.04,(docker||Hadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2) #3219

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Mesos-Buildbot/BUILDTOOL=autotools,COMPILER=gcc,CONFIGURATION=--verbose%20--enable-libevent%20--enable-ssl,ENVIRONMENT=GLOG_v=1%20MESOS_VERBOSE=1,OS=ubuntu%3A14.04,label_exp=(docker%7C%7CHadoop)&&(!ubuntu-us1)&&(!ubuntu-eu2)/3219/changes>

Changes:

[bmahler] Update the allocator to handle frameworks with multiple roles.

[bmahler] Update the allocator unit tests to reflect MULTI_ROLE support.

[bmahler] Updated the master to handle non-MULTI_ROLE schedulers.

[bmahler] Updated the master's HTTP operations to handle MULTI_ROLE changes.

[bmahler] Updated the agent to be MULTI_ROLE capable.

[bmahler] Updated master to handle non-MULTI_ROLE agents.

[bmahler] Fixed MULTI_ROLE related bugs when updating framework info.

[bmahler] Update the tests to handle MULTI_ROLE support.

------------------------------------------
[...truncated 181031 lines...]
I0204 05:02:02.380934 14616 authenticator.cpp:414] Starting authentication session for crammd5-authenticatee(1296)@172.17.0.2:39400
I0204 05:02:02.381175 14618 scheduler.cpp:361] Connected with the master at https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.381189 14614 authenticator.cpp:98] Creating new server SASL connection
I0204 05:02:02.381485 14621 authenticatee.cpp:213] Received SASL authentication mechanisms: CRAM-MD5
I0204 05:02:02.381563 14621 authenticatee.cpp:239] Attempting to authenticate with mechanism 'CRAM-MD5'
I0204 05:02:02.381691 14621 authenticator.cpp:204] Received SASL authentication start
I0204 05:02:02.381883 14621 authenticator.cpp:326] Authentication requires more steps
I0204 05:02:02.382014 14629 authenticatee.cpp:259] Received SASL authentication step
I0204 05:02:02.382391 14621 authenticator.cpp:232] Received SASL authentication step
I0204 05:02:02.382422 14621 auxprop.cpp:109] Request to lookup properties for user: 'test-principal' realm: '3baa7479fba4' server FQDN: '3baa7479fba4' SASL_AUXPROP_VERIFY_AGAINST_HASH: false SASL_AUXPROP_OVERRIDE: false SASL_AUXPROP_AUTHZID: false 
I0204 05:02:02.382447 14621 auxprop.cpp:181] Looking up auxiliary property '*userPassword'
I0204 05:02:02.382501 14621 auxprop.cpp:181] Looking up auxiliary property '*cmusaslsecretCRAM-MD5'
I0204 05:02:02.382546 14621 auxprop.cpp:109] Request to lookup properties for user: 'test-principal' realm: '3baa7479fba4' server FQDN: '3baa7479fba4' SASL_AUXPROP_VERIFY_AGAINST_HASH: false SASL_AUXPROP_OVERRIDE: false SASL_AUXPROP_AUTHZID: true 
I0204 05:02:02.382566 14621 auxprop.cpp:131] Skipping auxiliary property '*userPassword' since SASL_AUXPROP_AUTHZID == true
I0204 05:02:02.382578 14621 auxprop.cpp:131] Skipping auxiliary property '*cmusaslsecretCRAM-MD5' since SASL_AUXPROP_AUTHZID == true
I0204 05:02:02.382599 14621 authenticator.cpp:318] Authentication success
I0204 05:02:02.382691 14617 authenticatee.cpp:299] Authentication success
I0204 05:02:02.382750 14622 scheduler.cpp:243] Sending SUBSCRIBE call to https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.382982 14623 master.cpp:7133] Successfully authenticated principal 'test-principal' at slave(689)@172.17.0.2:39400
I0204 05:02:02.382994 14618 authenticator.cpp:432] Authentication session cleanup for crammd5-authenticatee(1296)@172.17.0.2:39400
I0204 05:02:02.383196 14625 slave.cpp:1088] Successfully authenticated with master master@172.17.0.2:39400
I0204 05:02:02.383345 14625 slave.cpp:1515] Will retry registration in 15.212038ms if necessary
I0204 05:02:02.383535 14626 master.cpp:5318] Registering agent at slave(689)@172.17.0.2:39400 (3baa7479fba4) with id 694eecd9-657d-424a-b705-402852e92662-S0
I0204 05:02:02.383981 14624 process.cpp:3697] Handling HTTP event for process 'master' with path: '/master/api/v1/scheduler'
I0204 05:02:02.383993 14629 registrar.cpp:461] Applied 1 operations in 71505ns; attempting to update the registry
I0204 05:02:02.386689 14629 registrar.cpp:506] Successfully updated the registry in 2.62912ms
I0204 05:02:02.387375 14629 slave.cpp:4301] Received ping from slave-observer(624)@172.17.0.2:39400
I0204 05:02:02.387461 14616 master.cpp:5392] Registered agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4) with cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 05:02:02.387549 14629 slave.cpp:1134] Registered with master master@172.17.0.2:39400; given agent ID 694eecd9-657d-424a-b705-402852e92662-S0
I0204 05:02:02.387584 14629 fetcher.cpp:90] Clearing fetcher cache
I0204 05:02:02.387715 14623 status_update_manager.cpp:184] Resuming sending status updates
I0204 05:02:02.387790 14617 hierarchical.cpp:515] Added agent 694eecd9-657d-424a-b705-402852e92662-S0 (3baa7479fba4) with cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000] (allocated: {})
I0204 05:02:02.387923 14629 slave.cpp:1162] Checkpointing SlaveInfo to '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_0_3F4qa5/meta/slaves/694eecd9-657d-424a-b705-402852e92662-S0/slave.info'
I0204 05:02:02.388010 14617 hierarchical.cpp:1802] No allocations performed
I0204 05:02:02.388068 14617 hierarchical.cpp:1404] Performed allocation for 1 agents in 173638ns
I0204 05:02:02.388240 14629 slave.cpp:1200] Forwarding total oversubscribed resources {}
I0204 05:02:02.388406 14618 master.cpp:5949] Received update of agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4) with total oversubscribed resources {}
I0204 05:02:02.388679 14629 hierarchical.cpp:588] Agent 694eecd9-657d-424a-b705-402852e92662-S0 (3baa7479fba4) updated with oversubscribed resources {} (total: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000], allocated: {})
I0204 05:02:02.388851 14629 hierarchical.cpp:1802] No allocations performed
I0204 05:02:02.388897 14629 hierarchical.cpp:1404] Performed allocation for 1 agents in 120039ns
I0204 05:02:02.421458 14617 http.cpp:420] HTTP POST for /master/api/v1/scheduler from 172.17.0.2:51690
I0204 05:02:02.423868 14617 master.cpp:2422] Received subscription request for HTTP framework 'default'
I0204 05:02:02.424036 14617 master.cpp:2159] Authorizing framework principal 'test-principal' to receive offers for role '*'
I0204 05:02:02.424553 14614 master.cpp:2536] Subscribing framework 'default' with checkpointing disabled and capabilities [  ]
I0204 05:02:02.425300 14617 master.hpp:2114] Sending heartbeat to 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.425313 14626 hierarchical.cpp:286] Added framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.426723 14626 hierarchical.cpp:1892] No inverse offers to send out!
I0204 05:02:02.426784 14626 hierarchical.cpp:1404] Performed allocation for 1 agents in 1.300053ms
I0204 05:02:02.427305 14617 master.cpp:6932] Sending 1 offers to framework 694eecd9-657d-424a-b705-402852e92662-0000 (default)
I0204 05:02:02.464792 14627 scheduler.cpp:676] Enqueuing event SUBSCRIBED received from https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.465888 14621 scheduler.cpp:676] Enqueuing event HEARTBEAT received from https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.466902 14622 scheduler.cpp:676] Enqueuing event OFFERS received from https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.469596 14614 scheduler.cpp:243] Sending ACCEPT call to https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.470865 14619 process.cpp:3697] Handling HTTP event for process 'master' with path: '/master/api/v1/scheduler'
I0204 05:02:02.509284 14616 http.cpp:420] HTTP POST for /master/api/v1/scheduler from 172.17.0.2:51689
I0204 05:02:02.510342 14616 master.cpp:3745] Processing ACCEPT call for offers: [ 694eecd9-657d-424a-b705-402852e92662-O0 ] on agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4) for framework 694eecd9-657d-424a-b705-402852e92662-0000 (default)
I0204 05:02:02.510460 14616 master.cpp:3318] Authorizing framework principal 'test-principal' to launch task 573acaa4-067a-4450-b411-78f886fa37a9
W0204 05:02:02.513442 14616 validation.cpp:1048] Executor 'default' for task '573acaa4-067a-4450-b411-78f886fa37a9' uses less CPUs (None) than the minimum required (0.01). Please update your executor, as this will be mandatory in future releases.
W0204 05:02:02.513485 14616 validation.cpp:1060] Executor 'default' for task '573acaa4-067a-4450-b411-78f886fa37a9' uses less memory (None) than the minimum required (32MB). Please update your executor, as this will be mandatory in future releases.
I0204 05:02:02.514117 14616 master.cpp:8993] Adding task 573acaa4-067a-4450-b411-78f886fa37a9 with resources cpus(*)(allocated: *):2; mem(*)(allocated: *):1024; disk(*)(allocated: *):1024; ports(*)(allocated: *):[31000-32000] on agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.514523 14616 master.cpp:4395] Launching task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000 (default) with resources cpus(*)(allocated: *):2; mem(*)(allocated: *):1024; disk(*)(allocated: *):1024; ports(*)(allocated: *):[31000-32000] on agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.515084 14614 slave.cpp:1583] Got assigned task '573acaa4-067a-4450-b411-78f886fa37a9' for framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.516017 14614 slave.cpp:1743] Launching task '573acaa4-067a-4450-b411-78f886fa37a9' for framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.518983 14614 paths.cpp:547] Trying to chown '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_0_3F4qa5/slaves/694eecd9-657d-424a-b705-402852e92662-S0/frameworks/694eecd9-657d-424a-b705-402852e92662-0000/executors/default/runs/937c6546-d118-47a1-aecd-28eff6cfe7a9' to user 'mesos'
I0204 05:02:02.519258 14614 slave.cpp:6414] Launching executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000 with resources {} in work directory '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_0_3F4qa5/slaves/694eecd9-657d-424a-b705-402852e92662-S0/frameworks/694eecd9-657d-424a-b705-402852e92662-0000/executors/default/runs/937c6546-d118-47a1-aecd-28eff6cfe7a9'
I0204 05:02:02.520053 14614 slave.cpp:2073] Queued task '573acaa4-067a-4450-b411-78f886fa37a9' for executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.520150 14614 slave.cpp:884] Successfully attached file '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_0_3F4qa5/slaves/694eecd9-657d-424a-b705-402852e92662-S0/frameworks/694eecd9-657d-424a-b705-402852e92662-0000/executors/default/runs/937c6546-d118-47a1-aecd-28eff6cfe7a9'
I0204 05:02:02.522351 14627 executor.cpp:192] Version: 1.2.0
I0204 05:02:02.544659 14622 executor.cpp:401] Connected with the agent
I0204 05:02:02.545338 14624 executor.cpp:302] Sending SUBSCRIBE call to https://172.17.0.2:39400/slave(689)/api/v1/executor
I0204 05:02:02.546437 14627 process.cpp:3697] Handling HTTP event for process 'slave(689)' with path: '/slave(689)/api/v1/executor'
I0204 05:02:02.585105 14619 http.cpp:303] HTTP POST for /slave(689)/api/v1/executor from 172.17.0.2:51691
I0204 05:02:02.585381 14619 slave.cpp:3124] Received Subscribe request for HTTP executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.586966 14619 slave.cpp:2286] Sending queued task '573acaa4-067a-4450-b411-78f886fa37a9' to executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000 (via HTTP)
I0204 05:02:02.625735 14627 executor.cpp:716] Enqueuing event SUBSCRIBED received from https://172.17.0.2:39400/slave(689)/api/v1/executor
I0204 05:02:02.626966 14620 executor.cpp:716] Enqueuing event LAUNCH received from https://172.17.0.2:39400/slave(689)/api/v1/executor
I0204 05:02:02.628039 14625 executor.cpp:302] Sending UPDATE call to https://172.17.0.2:39400/slave(689)/api/v1/executor
I0204 05:02:02.629220 14615 process.cpp:3697] Handling HTTP event for process 'slave(689)' with path: '/slave(689)/api/v1/executor'
I0204 05:02:02.669335 14616 http.cpp:303] HTTP POST for /slave(689)/api/v1/executor from 172.17.0.2:51692
I0204 05:02:02.669546 14616 slave.cpp:3771] Handling status update TASK_RUNNING (UUID: dc7bc484-de04-4b12-8144-ed41c2239f86) for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.670435 14622 status_update_manager.cpp:323] Received status update TASK_RUNNING (UUID: dc7bc484-de04-4b12-8144-ed41c2239f86) for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.670547 14622 status_update_manager.cpp:500] Creating StatusUpdate stream for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.671293 14622 status_update_manager.cpp:377] Forwarding update TASK_RUNNING (UUID: dc7bc484-de04-4b12-8144-ed41c2239f86) for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000 to the agent
I0204 05:02:02.671623 14623 slave.cpp:4211] Forwarding the update TASK_RUNNING (UUID: dc7bc484-de04-4b12-8144-ed41c2239f86) for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000 to master@172.17.0.2:39400
I0204 05:02:02.671901 14623 slave.cpp:4105] Status update manager successfully handled status update TASK_RUNNING (UUID: dc7bc484-de04-4b12-8144-ed41c2239f86) for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.672109 14614 master.cpp:6094] Status update TASK_RUNNING (UUID: dc7bc484-de04-4b12-8144-ed41c2239f86) for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000 from agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.672202 14614 master.cpp:6162] Forwarding status update TASK_RUNNING (UUID: dc7bc484-de04-4b12-8144-ed41c2239f86) for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.672693 14614 master.cpp:8242] Updating the state of task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000 (latest state: TASK_RUNNING, status update state: TASK_RUNNING)
I0204 05:02:02.673029 14623 executor.cpp:716] Enqueuing event ACKNOWLEDGED received from https://172.17.0.2:39400/slave(689)/api/v1/executor
I0204 05:02:02.673960 14629 scheduler.cpp:676] Enqueuing event UPDATE received from https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.675520 14626 scheduler.cpp:243] Sending TEARDOWN call to https://172.17.0.2:39400/master/api/v1/scheduler
I0204 05:02:02.676790 14623 process.cpp:3697] Handling HTTP event for process 'master' with path: '/master/api/v1/scheduler'
I0204 05:02:02.717206 14622 http.cpp:420] HTTP POST for /master/api/v1/scheduler from 172.17.0.2:51689
I0204 05:02:02.717309 14622 master.cpp:7657] Removing framework 694eecd9-657d-424a-b705-402852e92662-0000 (default)
I0204 05:02:02.717339 14622 master.cpp:3080] Deactivating framework 694eecd9-657d-424a-b705-402852e92662-0000 (default)
I0204 05:02:02.717571 14621 hierarchical.cpp:415] Deactivated framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.717587 14619 slave.cpp:2613] Asked to shut down framework 694eecd9-657d-424a-b705-402852e92662-0000 by master@172.17.0.2:39400
I0204 05:02:02.717636 14619 slave.cpp:2638] Shutting down framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.717586 14622 master.cpp:8242] Updating the state of task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000 (latest state: TASK_KILLED, status update state: TASK_KILLED)
I0204 05:02:02.717703 14619 slave.cpp:5020] Shutting down executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000 (via HTTP)
I0204 05:02:02.718554 14616 executor.cpp:716] Enqueuing event SHUTDOWN received from https://172.17.0.2:39400/slave(689)/api/v1/executor
I0204 05:02:02.718786 14622 master.cpp:8336] Removing task 573acaa4-067a-4450-b411-78f886fa37a9 with resources cpus(*)(allocated: *):2; mem(*)(allocated: *):1024; disk(*)(allocated: *):1024; ports(*)(allocated: *):[31000-32000] of framework 694eecd9-657d-424a-b705-402852e92662-0000 on agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.719326 14622 master.cpp:8365] Removing executor 'default' with resources {} of framework 694eecd9-657d-424a-b705-402852e92662-0000 on agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.719558 14626 hierarchical.cpp:1082] Recovered cpus(*)(allocated: *):2; mem(*)(allocated: *):1024; disk(*)(allocated: *):1024; ports(*)(allocated: *):[31000-32000] (total: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000], allocated: {}) on agent 694eecd9-657d-424a-b705-402852e92662-S0 from framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.720146 14629 hierarchical.cpp:362] Removed framework 694eecd9-657d-424a-b705-402852e92662-0000
E0204 05:02:02.720491 14625 scheduler.cpp:649] End-Of-File received from master. The master closed the event stream
I0204 05:02:02.721046 14614 scheduler.cpp:444] Re-detecting master
I0204 05:02:02.721781 14614 scheduler.cpp:395] Ignoring disconnection attempt from stale connection
I0204 05:02:02.721880 14614 scheduler.cpp:395] Ignoring disconnection attempt from stale connection
I0204 05:02:02.722121 14614 scheduler.cpp:470] New master detected at master@172.17.0.2:39400
I0204 05:02:02.722158 14614 scheduler.cpp:479] Waiting for 0ns before initiating a re-(connection) attempt with the master
I0204 05:02:02.725340 14623 slave.cpp:4706] Executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000 exited with status 0
I0204 05:02:02.725828 14623 slave.cpp:4806] Cleaning up executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000 (via HTTP)
W0204 05:02:02.725989 14626 master.cpp:6218] Ignoring unknown exited executor 'default' of framework 694eecd9-657d-424a-b705-402852e92662-0000 on agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.726328 14627 gc.cpp:55] Scheduling '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_0_3F4qa5/slaves/694eecd9-657d-424a-b705-402852e92662-S0/frameworks/694eecd9-657d-424a-b705-402852e92662-0000/executors/default/runs/937c6546-d118-47a1-aecd-28eff6cfe7a9' for gc 6.99999159611259days in the future
I0204 05:02:02.726521 14623 slave.cpp:4894] Cleaning up framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.726527 14628 gc.cpp:55] Scheduling '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_0_3F4qa5/slaves/694eecd9-657d-424a-b705-402852e92662-S0/frameworks/694eecd9-657d-424a-b705-402852e92662-0000/executors/default' for gc 6.99999159208296days in the future
I0204 05:02:02.726687 14617 status_update_manager.cpp:285] Closing status update streams for framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.726747 14617 status_update_manager.cpp:531] Cleaning up status update stream for task 573acaa4-067a-4450-b411-78f886fa37a9 of framework 694eecd9-657d-424a-b705-402852e92662-0000
I0204 05:02:02.726796 14624 gc.cpp:55] Scheduling '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_0_3F4qa5/slaves/694eecd9-657d-424a-b705-402852e92662-S0/frameworks/694eecd9-657d-424a-b705-402852e92662-0000' for gc 6.99999158904days in the future
I0204 05:02:02.726845 14623 slave.cpp:803] Agent terminating
I0204 05:02:02.727107 14616 master.cpp:1263] Agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4) disconnected
I0204 05:02:02.727171 14616 master.cpp:3117] Disconnecting agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.728067 14616 master.cpp:3136] Deactivating agent 694eecd9-657d-424a-b705-402852e92662-S0 at slave(689)@172.17.0.2:39400 (3baa7479fba4)
I0204 05:02:02.728241 14623 hierarchical.cpp:617] Agent 694eecd9-657d-424a-b705-402852e92662-S0 deactivated
I0204 05:02:02.730278 14629 master.cpp:1102] Master terminating
I0204 05:02:02.730913 14618 hierarchical.cpp:548] Removed agent 694eecd9-657d-424a-b705-402852e92662-S0
W0204 05:02:02.732270 32237 master.hpp:2541] Failed to close HTTP pipe for 694eecd9-657d-424a-b705-402852e92662-0000 (default)
[       OK ] ContentTypeAndSSLConfig/SchedulerSSLTest.RunTaskAndTeardown/0 (1076 ms)
[ RUN      ] ContentTypeAndSSLConfig/SchedulerSSLTest.RunTaskAndTeardown/1
W0204 05:02:02.743487 14630 process.cpp:3022] Attempted to spawn a process (__http_connection__(1045)@172.17.0.2:39400) after finalizing libprocess!
*** Aborted at 1486184522 (unix time) try "date -d @1486184522" if you are using GNU date ***
I0204 05:02:02.744928 32237 process.cpp:1246] libprocess is initialized on 172.17.0.2:47392 with 16 worker threads
I0204 05:02:02.746425 32237 cluster.cpp:160] Creating default 'local' authorizer
I0204 05:02:02.749294 14634 master.cpp:383] Master 43745a83-c840-403e-ba7e-ffc7957f037b (3baa7479fba4) started on 172.17.0.2:47392
I0204 05:02:02.749357 14634 master.cpp:385] Flags at startup: --acls="" --agent_ping_timeout="15secs" --agent_reregister_timeout="10mins" --allocation_interval="1secs" --allocator="HierarchicalDRF" --authenticate_agents="true" --authenticate_frameworks="true" --authenticate_http_frameworks="true" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticators="crammd5" --authorizers="local" --credentials="/tmp/rzGoYM/credentials" --framework_sorter="drf" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_framework_authenticators="basic" --initialize_driver_logging="true" --log_auto_initialize="true" --logbufsecs="0" --logging_level="INFO" --max_agent_ping_timeouts="5" --max_completed_frameworks="50" --max_completed_tasks_per_framework="1000" --max_unreachable_tasks_per_framework="1000" --quiet="false" --recovery_agent_removal_limit="100%" --registry="in_memory" --registry_fetch_timeout="1mins" --registry_gc_interval="15mins" --registry_max_agent_age="2weeks" --registry_max_agent_count="102400" --registry_store_timeout="100secs" --registry_strict="false" --root_submissions="true" --user_sorter="drf" --version="false" --webui_dir="/mesos/mesos-1.2.0/_inst/share/mesos/webui" --work_dir="/tmp/rzGoYM/master" --zk_session_timeout="10secs"
I0204 05:02:02.749853 14634 master.cpp:435] Master only allowing authenticated frameworks to register
I0204 05:02:02.749869 14634 master.cpp:449] Master only allowing authenticated agents to register
I0204 05:02:02.749886 14634 master.cpp:462] Master only allowing authenticated HTTP frameworks to register
I0204 05:02:02.749902 14634 credentials.hpp:37] Loading credentials for authentication from '/tmp/rzGoYM/credentials'
I0204 05:02:02.750213 14634 master.cpp:507] Using default 'crammd5' authenticator
I0204 05:02:02.750362 14634 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-master-readonly'
I0204 05:02:02.750550 14634 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-master-readwrite'
I0204 05:02:02.750721 14634 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-master-scheduler'
I0204 05:02:02.750944 14634 master.cpp:587] Authorization enabled
I0204 05:02:02.751121 14642 hierarchical.cpp:161] Initialized hierarchical allocator process
I0204 05:02:02.751121 14631 whitelist_watcher.cpp:77] No whitelist given
I0204 05:02:02.753342 14639 master.cpp:2123] Elected as the leading master!
I0204 05:02:02.753376 14639 master.cpp:1645] Recovering from registrar
I0204 05:02:02.753492 14632 registrar.cpp:329] Recovering registrar
PC: @     0x2b5c6780e5d0 process::PID<>::PID()
*** SIGSEGV (@0x0) received by PID 32237 (TID 0x2b5c74e03700) from PID 0; stack trace: ***
I0204 05:02:02.755612 14644 registrar.cpp:362] Successfully fetched the registry (0B) in 2.07488ms
I0204 05:02:02.755723 14644 registrar.cpp:461] Applied 1 operations in 17451ns; attempting to update the registry
    @     0x2b5cc07d9197 (unknown)
    @     0x2b5cc07dd479 (unknown)
    @     0x2b5c69ea6330 (unknown)
I0204 05:02:02.758142 14644 registrar.cpp:506] Successfully updated the registry in 2.36288ms
I0204 05:02:02.758256 14644 registrar.cpp:392] Successfully recovered registrar
I0204 05:02:02.758697 14644 master.cpp:1761] Recovered 0 agents from the registry (129B); allowing 10mins for agents to re-register
I0204 05:02:02.758721 14646 hierarchical.cpp:188] Skipping recovery of hierarchical allocator: nothing to recover
I0204 05:02:02.762799 32237 cluster.cpp:446] Creating default 'local' authorizer
I0204 05:02:02.764309 14631 slave.cpp:211] Mesos agent started on (690)@172.17.0.2:47392
I0204 05:02:02.764963 32237 scheduler.cpp:184] Version: 1.2.0
I0204 05:02:02.764395 14631 slave.cpp:212] Flags at startup: --acls="" --appc_simple_discovery_uri_prefix="http://" --appc_store_dir="/tmp/mesos/store/appc" --authenticate_http_readonly="true" --authenticate_http_readwrite="true" --authenticatee="crammd5" --authentication_backoff_factor="1secs" --authorizer="local" --cgroups_cpu_enable_pids_and_tids_count="false" --cgroups_enable_cfs="false" --cgroups_hierarchy="/sys/fs/cgroup" --cgroups_limit_swap="false" --cgroups_root="mesos" --container_disk_watch_interval="15secs" --containerizers="mesos" --credential="/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_DE0Ara/credential" --default_role="*" --disk_watch_interval="1mins" --docker="docker" --docker_kill_orphans="true" --docker_registry="https://registry-1.docker.io" --docker_remove_delay="6hrs" --docker_socket="/var/run/docker.sock" --docker_stop_timeout="0ns" --docker_store_dir="/tmp/mesos/store/docker" --docker_volume_checkpoint_dir="/var/run/mesos/isolators/docker/volume" --enforce_container_disk_quota="false" --executor_registration_timeout="1mins" --executor_shutdown_grace_period="5secs" --fetcher_cache_dir="/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_DE0Ara/fetch" --fetcher_cache_size="2GB" --frameworks_home="" --gc_delay="1weeks" --gc_disk_headroom="0.1" --hadoop_home="" --help="false" --hostname_lookup="true" --http_authenticators="basic" --http_command_executor="false" --http_credentials="/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_DE0Ara/http_credentials" --http_heartbeat_interval="30secs" --initialize_driver_logging="true" --isolation="posix/cpu,posix/mem" --launcher="posix" --launcher_dir="/mesos/mesos-1.2.0/_build/src" --logbufsecs="0" --logging_level="INFO" --max_completed_executors_per_framework="150" --oversubscribed_resources_interval="15secs" --perf_duration="10secs" --perf_interval="1mins" --qos_correction_interval_min="0ns" --quiet="false" --recover="reconnect" 
--recovery_timeout="15mins" --registration_backoff_factor="10ms" --resources="cpus:2;gpus:0;mem:1024;disk:1024;ports:[31000-32000]" --revocable_cpu_low_priority="true" --runtime_dir="/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_DE0Ara" --sandbox_directory="/mnt/mesos/sandbox" --strict="true" --switch_user="true" --systemd_enable_support="true" --systemd_runtime_directory="/run/systemd/system" --version="false" --work_dir="/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_u1eZHY"
I0204 05:02:02.765041 14631 credentials.hpp:86] Loading credential for authentication from '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_DE0Ara/credential'
I0204 05:02:02.765224 14631 slave.cpp:354] Agent using credential for: test-principal
I0204 05:02:02.765250 14631 credentials.hpp:37] Loading credentials for authentication from '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_DE0Ara/http_credentials'
I0204 05:02:02.765511 14640 scheduler.cpp:470] New master detected at master@172.17.0.2:47392
I0204 05:02:02.765543 14631 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readonly'
I0204 05:02:02.765722 14631 http.cpp:919] Using default 'basic' HTTP authenticator for realm 'mesos-agent-readwrite'
    @     0x2b5c6780e5d0 process::PID<>::PID()
I0204 05:02:02.766948 14631 slave.cpp:541] Agent resources: cpus(*):2; mem(*):1024; disk(*):1024; ports(*):[31000-32000]
I0204 05:02:02.767027 14631 slave.cpp:549] Agent attributes: [  ]
I0204 05:02:02.767042 14631 slave.cpp:554] Agent hostname: 3baa7479fba4
I0204 05:02:02.767127 14640 scheduler.cpp:479] Waiting for 0ns before initiating a re-(connection) attempt with the master
I0204 05:02:02.767180 14646 status_update_manager.cpp:177] Pausing sending status updates
I0204 05:02:02.768537 14641 state.cpp:60] Recovering state from '/tmp/ContentTypeAndSSLConfig_SchedulerSSLTest_RunTaskAndTeardown_1_u1eZHY/meta'
I0204 05:02:02.768781 14644 status_update_manager.cpp:203] Recovering status update manager
I0204 05:02:02.769122 14642 slave.cpp:5499] Finished recovery
I0204 05:02:02.769616 14642 slave.cpp:5673] Querying resource estimator for oversubscribable resources
I0204 05:02:02.769855 14632 status_update_manager.cpp:177] Pausing sending status updates
I0204 05:02:02.769856 14642 slave.cpp:931] New master detected at master@172.17.0.2:47392
I0204 05:02:02.769923 14642 slave.cpp:966] Detecting new master
I0204 05:02:02.770051 14642 slave.cpp:5687] Received oversubscribable resources {} from the resource estimator
    @     0x2b5c67806231 process::Process<>::self()
    @     0x2b5c677e8ebb process::ProcessManager::spawn()
    @     0x2b5c677f012c process::spawn()
    @     0x2b5c67743263 process::spawn<>()
    @     0x2b5c6773fff3 process::http::Connection::Data::Data()
    @     0x2b5c6776bb38 _ZN9__gnu_cxx13new_allocatorIN7process4http10Connection4DataEE9constructIS4_JRKNS1_7network8internal6SocketINS7_7AddressEEEEEEvPT_DpOT0_
    @     0x2b5c6776ac26 std::allocator_traits<>::_S_construct<>()
    @     0x2b5c67769008 _ZNSt16allocator_traitsISaIN7process4http10Connection4DataEEE9constructIS3_IRKNS0_7network8internal6SocketINS7_7AddressEEEEEEDTcl12_S_constructfp_fp0_spcl7forwardIT0_Efp1_EEERS4_PT_DpOSE_
    @     0x2b5c677668a3 std::_Sp_counted_ptr_inplace<>::_Sp_counted_ptr_inplace<>()
    @     0x2b5c677630c1 __gnu_cxx::new_allocator<>::construct<>()
    @     0x2b5c6775ea67 std::allocator_traits<>::_S_construct<>()
    @     0x2b5c67759e4a _ZNSt16allocator_traitsISaISt23_Sp_counted_ptr_inplaceIN7process4http10Connection4DataESaIS4_ELN9__gnu_cxx12_Lock_policyE2EEEE9constructIS8_IKS5_RKNS1_7network8internal6SocketINSD_7AddressEEEEEEDTcl12_S_constructfp_fp0_spcl7forwardIT0_Efp1_EEERS9_PT_DpOSK_
    @     0x2b5c6775503f _ZNSt14__shared_countILN9__gnu_cxx12_Lock_policyE2EEC2IN7process4http10Connection4DataESaIS7_EJRKNS4_7network8internal6SocketINS9_7AddressEEEEEESt19_Sp_make_shared_tagPT_RKT0_DpOT1_
    @     0x2b5c6774ff1a _ZNSt12__shared_ptrIN7process4http10Connection4DataELN9__gnu_cxx12_Lock_policyE2EEC1ISaIS3_EJRKNS0_7network8internal6SocketINS9_7AddressEEEEEESt19_Sp_make_shared_tagRKT_DpOT0_
    @     0x2b5c6774b8da std::shared_ptr<>::shared_ptr<>()
    @     0x2b5c677480a0 _ZSt15allocate_sharedIN7process4http10Connection4DataESaIS3_EJRKNS0_7network8internal6SocketINS5_7AddressEEEEESt10shared_ptrIT_ERKT0_DpOT1_
    @     0x2b5c67743336 _ZSt11make_sharedIN7process4http10Connection4DataEJRKNS0_7network8internal6SocketINS4_7AddressEEEEESt10shared_ptrIT_EDpOT0_
    @     0x2b5c67706bdd process::http::Connection::Connection()
    @     0x2b5c67706db2 _ZZN7process4http7connectERKNS_7network7AddressENS0_6SchemeEENKUlvE_clEv
    @     0x2b5c67718950 _ZNSt17_Function_handlerIFN7process6FutureINS0_4http10ConnectionEEEvEZNS2_7connectERKNS0_7network7AddressENS2_6SchemeEEUlvE_E9_M_invokeERKSt9_Any_data
    @     0x2b5c66d7e229 std::function<>::operator()()
    @     0x2b5c66d781d9 _ZNSt5_BindIFSt8functionIFN7process6FutureINS1_4http10ConnectionEEEvEEvEE6__callIS5_IRK7NothingEIEEET_OSt5tupleIIDpT0_EESt12_Index_tupleIIXspT1_EEE
    @     0x2b5c66d716b5 _ZNSt5_BindIFSt8functionIFN7process6FutureINS1_4http10ConnectionEEEvEEvEEclIJRK7NothingES5_EET0_DpOT_
    @     0x2b5c66d6969a std::_Function_handler<>::_M_invoke()
    @     0x2b5c66d7e307 std::function<>::operator()()
    @     0x2b5c66d7826d process::internal::thenf<>()
    @     0x2b5c66d88e95 _ZNSt5_BindIFPFvRKSt8functionIFN7process6FutureINS1_4http10ConnectionEEERK7NothingEERKSt10shared_ptrINS1_7PromiseIS4_EEERKNS2_IS6_EEESA_SG_St12_PlaceholderILi1EEEE6__callIvJSL_EJLm0ELm1ELm2EEEET_OSt5tupleIJDpT0_EESt12_Index_tupleIJXspT1_EEE
    @     0x2b5c66d849db _ZNSt5_BindIFPFvRKSt8functionIFN7process6FutureINS1_4http10ConnectionEEERK7NothingEERKSt10shared_ptrINS1_7PromiseIS4_EEERKNS2_IS6_EEESA_SG_St12_PlaceholderILi1EEEEclIJSL_EvEET0_DpOT_
make[4]: *** [check-local] Segmentation fault
make[4]: Leaving directory `/mesos/mesos-1.2.0/_build/src'
make[3]: *** [check-am] Error 2
make[3]: Leaving directory `/mesos/mesos-1.2.0/_build/src'
make[2]: *** [check] Error 2
make[2]: Leaving directory `/mesos/mesos-1.2.0/_build/src'
make[1]: *** [check-recursive] Error 1
make[1]: Leaving directory `/mesos/mesos-1.2.0/_build'
make: *** [distcheck] Error 1
+ docker rmi mesos-1486181357-28653
Untagged: mesos-1486181357-28653:latest
Deleted: sha256:c3c12c02c233514e878d1defd6f7adecde2975dfaf475cb84aada188f6b657fd
Deleted: sha256:e9c453125b95f3f01ba04449a6be3809d4411daf951b537759e186f00e9e0375
Deleted: sha256:e021bdc572a0e4e14324d8a395a5e8527cae56164f401f0fcbb183ab94607565
Deleted: sha256:d4db67527e17ded12e9a21348f751dee9b1c6659185dc800585c736425280506
Deleted: sha256:63df58bf0dd0c3e596f0dc73b7f8b648bf0d1339cf9ba2da0ca4a289be562ee8
Deleted: sha256:d7e0fae7075efa5ab6613fd81fc3d53274e855d6c0df89255de865c755455828
Deleted: sha256:e3f4defab2d6b7e2babc66026a10b483419bd7b933c4576d70faf66f96363394
Deleted: sha256:c1b09b2668406d1277713793d31f0f68905a36a00a54ed85a3d9077582ca9189
Deleted: sha256:cd0d40f1cff1b000861e397249383956f8bec28f7ba1ee4c8f539bd2eb56c5a0
Deleted: sha256:8bfdd4fe2e5f152a42d6c8265560f3038cf4aad19659ab12c72f390a096367e1
Deleted: sha256:610807729d9339e8751e7f4854fd7c81f2b937c8ddeda76f9cd39c533b1f9031
Deleted: sha256:ce5fde21914666a8a32346b37d58772ca0be313567e3646581792a9c96461700
Deleted: sha256:66a21845441c7be80e0f93e366c251b0e791a351a8f1a92577233ad0c58ace26
Deleted: sha256:114b9d99ffee5679127e29181e954ff3eea2452d8cff4f6b92f90793489de9fb
Deleted: sha256:b4b37329756bc48ad4f532cbe51febe28be97445270100300358fda0b8da8e09
Deleted: sha256:87994798c7bdcc72f45799576ad84c4e5666d111db8a701dca075561cc0b4088
Deleted: sha256:3989880d5311b3a4fd4bb656c8d64916bf9ae3c76257aabb1c0148ab915b577d
Deleted: sha256:a5c312e225a4f8a337cea74bde0bf188d50e5e4c7ede62f7509a55c63b0eaed4
Deleted: sha256:9227d7e2fb325646d749045bc8c6973632795e5575f92542899d004789fc3c8a
Deleted: sha256:0d64c6b62f6156d8e2653af795e1ac7f5234ce3cb3c0f9b48556175e2d599674
Deleted: sha256:11e9ca74e130f7e245751e1e3469f91e6f7b706b398f8381da0285aed20aa31e
Deleted: sha256:63db516b9beb277d9169a7ca5bc59cfc04c2bdc23d8bf3d81c62097dafe2e071
Deleted: sha256:2584a4c24a5237d894188c0843f024a29091035509033bff688792e954c4ee02
Deleted: sha256:43709709dbf61da82e55ff3df56c511facbcffcd423b7601d2235bcc12d06801
Build step 'Execute shell' marked build as failure