From 643e3b4316b6c59009c259b96d38495152989df4 Mon Sep 17 00:00:00 2001
From: Elena Gryaznova
Date: Fri, 17 Jul 2015 19:32:49 +0300
Subject: [PATCH] LU-6867 test: detect active facet based on current state

Lustre failover tests cannot be run test-by-test on a setup with
${facet}_HOST != ${facet}failover_HOST because t-f does not restore
the facet state. t-f keeps this info in "${facet}active" files, which
are created when facet_failover() is executed for the first time in
the test session. Before facet_failover() is executed these files do
not exist and the active facet is ${facet} by default. When tests are
executed test-by-test, the active facet is ${facet}failover after the
1st test completes, and the 2nd test starts with ${facet}failover
active but without this info stored in the ${facet}active files.

The patch contains the following changes:
- add active facet detection based on the current lustre state;
- fix a sanity-hsm defect: exit with an error if agt${n}_HOST is empty.

Signed-off-by: Elena Gryaznova
Xyratex-bug-id: MRP-2680
Reviewed-by: Alexander Lezhoev
Reviewed-by: Andriy Skulysh
Change-Id: Ie42baaa55a6433596e6004d16eb5c18ae2ef7479
Reviewed-on: https://review.whamcloud.com/15638
Tested-by: Jenkins
Tested-by: Maloo
Reviewed-by: Alexander Lezhoev
Reviewed-by: Andriy Skulysh
Reviewed-by: Oleg Drokin
---
 lustre/tests/sanity-hsm.sh     |  2 ++
 lustre/tests/test-framework.sh | 68 +++++++++++++++++++++++++++++++-----------
 2 files changed, 52 insertions(+), 18 deletions(-)

diff --git a/lustre/tests/sanity-hsm.sh b/lustre/tests/sanity-hsm.sh
index 8669ba9..5a206b0 100755
--- a/lustre/tests/sanity-hsm.sh
+++ b/lustre/tests/sanity-hsm.sh
@@ -100,6 +100,8 @@ init_agt_vars() {
 			agent=CLIENT2
 		fi
 		eval export agt${n}_HOST=\$\{agt${n}_HOST:-${!agent}\}
+		local var=agt${n}_HOST
+		[[ ! -z "${!var}" ]] || error "agt${n}_HOST is empty!"
 	done
 
 	export SINGLEAGT=${SINGLEAGT:-agt1}
diff --git a/lustre/tests/test-framework.sh b/lustre/tests/test-framework.sh
index bf9336a..1f53fc9 100755
--- a/lustre/tests/test-framework.sh
+++ b/lustre/tests/test-framework.sh
@@ -3215,20 +3215,59 @@ facet_failover_host() {
 	fi
 }
 
+detect_active() {
+	local facet=$1
+	[ "$CLIENTONLY" ] && echo $facet && return
+
+	local failover=$(facet_failover_host $facet)
+
+	# failover is not associated with all facet types:
+	# the "AGT" facet type (remote HSM agents) does not
+	# have a failover.
+	[[ -z "$failover" ]] && echo $facet && return
+
+	local host=$(facet_host $facet)
+	local dev=$(facet_device $facet)
+
+	# ${facet}_svc can not be used here because
+	# facet_active() is called before this var is initialized
+	local svc=$(do_node $host $E2LABEL ${dev})
+
+	# active facet is ${facet}failover if device is mounted on failover,
+	# in other cases the active facet is $facet
+	[[ $dev = $(do_node $failover \
+			lctl get_param -n *.$svc.mntdev 2>/dev/null) ]] &&
+		echo ${facet}failover && return
+
+	echo $facet
+}
+
+init_active() {
+	local facet=$1
+
+	local active=$(detect_active $facet)
+	echo "${facet}active=$active" > $TMP/${facet}active
+}
+
 facet_active() {
-    local facet=$1
-    local activevar=${facet}active
+	local facet=$1
+	local activevar=${facet}active
 
-    if [ -f $TMP/${facet}active ] ; then
-        source $TMP/${facet}active
-    fi
+	# file is missing (nothing to store) if fail() is not
+	# executed during this test session yet;
+	# file content:
+	#      ost1active=ost1failover
+	#      ost1active=ost1
+	# let's detect active facet based on current lustre state
+	if [ ! -f $TMP/${facet}active ] ; then
+		init_active $facet
+	fi
+	source $TMP/${facet}active
 
-    active=${!activevar}
-    if [ -z "$active" ] ; then
-        echo -n ${facet}
-    else
-        echo -n ${active}
-    fi
+	# is ${facet}active set somewhere else?
+	active=${!activevar}
+	[[ -z "$active" ]] && exit 1
+	echo -n ${active}
 }
 
 facet_active_host() {
@@ -3714,13 +3753,6 @@ unmount_fstype() {
 
 ## MountConf setup
 
 stopall() {
-    # make sure we are using the primary server, so test-framework will
-    # be able to clean up properly.
-    activemds=`facet_active mds1`
-    if [ $activemds != "mds1" ]; then
-        fail mds1
-    fi
-
     local clients=$CLIENTS
     [ -z $clients ] && clients=$(hostname)
-- 
1.8.3.1
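
To see the mechanism in isolation, below is a minimal standalone sketch of
the state-file logic this patch introduces. It is a toy model, not
test-framework code: the facet and node names (ost1, oss2) are hypothetical,
and device_mounted_on() stands in for the real
"do_node $failover lctl get_param -n *.$svc.mntdev" probe.

#!/bin/bash
# Toy reproduction of the ${facet}active logic added by this patch.
# All names (ost1, oss2) are made up for the example.
TMP=${TMP:-/tmp}

facet_failover_host() { echo oss2; }   # failover partner of the facet
device_mounted_on()   { echo oss2; }   # pretend a failover happened earlier

detect_active() {
	local facet=$1
	local failover=$(facet_failover_host $facet)

	# facets without a failover partner are always themselves
	[[ -z "$failover" ]] && echo $facet && return

	# if the device is served by the failover node, the active
	# facet is ${facet}failover; otherwise it is $facet
	if [[ $(device_mounted_on $facet) == $failover ]]; then
		echo ${facet}failover
	else
		echo $facet
	fi
}

init_active() {
	local facet=$1
	echo "${facet}active=$(detect_active $facet)" > $TMP/${facet}active
}

facet_active() {
	local facet=$1
	local activevar=${facet}active

	# rebuild the state file from the live state when it is missing,
	# instead of silently assuming the primary facet is active
	[ -f $TMP/${facet}active ] || init_active $facet
	source $TMP/${facet}active
	echo ${!activevar}
}

rm -f $TMP/ost1active   # fresh test session, no stored state
facet_active ost1       # prints "ost1failover", not "ost1"

Running the sketch prints "ost1failover": even with no state file at the
start of the session, facet_active() derives the active facet from the
(simulated) mount state rather than defaulting to the primary, which is
exactly the test-by-test scenario the commit message describes.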