You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by na...@apache.org on 2019/08/23 16:56:37 UTC
[hadoop] branch trunk updated: HDDS-1978. Create helper script to
run blockade tests. (#1310)
This is an automated email from the ASF dual-hosted git repository.
nanda pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new 20064b6 HDDS-1978. Create helper script to run blockade tests. (#1310)
20064b6 is described below
commit 20064b69a8a7926f2d80776b029da28d5f98f730
Author: Nanda kumar <na...@apache.org>
AuthorDate: Fri Aug 23 22:26:30 2019 +0530
HDDS-1978. Create helper script to run blockade tests. (#1310)
---
hadoop-ozone/dev-support/checks/blockade.sh | 28 ++++++++++++++++++++++
.../src/test/blockade/ozone/cluster.py | 14 ++++++++---
2 files changed, 39 insertions(+), 3 deletions(-)
diff --git a/hadoop-ozone/dev-support/checks/blockade.sh b/hadoop-ozone/dev-support/checks/blockade.sh
new file mode 100755
index 0000000..f8b25c1
--- /dev/null
+++ b/hadoop-ozone/dev-support/checks/blockade.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "$DIR/../../.." || exit 1
+
+OZONE_VERSION=$(grep "<ozone.version>" "$DIR/../../pom.xml" | sed 's/<[^>]*>//g'| sed 's/^[ \t]*//')
+cd "$DIR/../../dist/target/ozone-$OZONE_VERSION/tests" || exit 1
+
+source "${DIR}/../../dist/target/ozone-${OZONE_VERSION}/compose/ozoneblockade/.env"
+export HADOOP_RUNNER_VERSION
+export HDDS_VERSION
+
+python -m pytest -s blockade
+exit $?
diff --git a/hadoop-ozone/fault-injection-test/network-tests/src/test/blockade/ozone/cluster.py b/hadoop-ozone/fault-injection-test/network-tests/src/test/blockade/ozone/cluster.py
index f83ad25..1434266 100644
--- a/hadoop-ozone/fault-injection-test/network-tests/src/test/blockade/ozone/cluster.py
+++ b/hadoop-ozone/fault-injection-test/network-tests/src/test/blockade/ozone/cluster.py
@@ -19,7 +19,9 @@ import logging
import os
import re
import subprocess
+import sys
import yaml
+import time
from os import environ
@@ -146,11 +148,17 @@ class OzoneCluster(object):
"""
Start Ozone Cluster in docker containers.
"""
- # check if proper env $HDDS_VERSION and $HADOOP_RUNNER_VERSION
- # are set.
# check if docker is up.
+ if "HADOOP_RUNNER_VERSION" not in os.environ:
+ self.__logger__.error("HADOOP_RUNNER_VERSION is not set.")
+ sys.exit(1)
+
+ if "HDDS_VERSION" not in os.environ:
+ self.__logger__.error("HDDS_VERSION is not set.")
+ sys.exit(1)
+
self.__logger__.info("Starting Ozone Cluster")
if Blockade.blockade_status() == 0:
Blockade.blockade_destroy()
@@ -162,7 +170,7 @@ class OzoneCluster(object):
"datanode=" + str(self.conf.datanode_count)])
self.__logger__.info("Waiting 10s for cluster start up...")
# Remove the sleep and wait only till the cluster is out of safemode
- # time.sleep(10)
+ time.sleep(10)
output = subprocess.check_output([Command.docker_compose, "-f",
self.docker_compose_file, "ps"])
node_list = []
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org