From: Elena Gryaznova
Date: Wed, 22 Dec 2010 20:16:47 +0000 (+0300)
Subject: b=23051 improve summary of acc-sm to include test times
X-Git-Tag: 1.8.5.52~9
X-Git-Url: https://git.whamcloud.com/?a=commitdiff_plain;h=ebf7dbd5c10d8d5f964fbe3bd195b549df0a4472;p=fs%2Flustre-release.git

b=23051 improve summary of acc-sm to include test times
i=Andreas.Dilger
i=Andrew.Perepechko
---

diff --git a/lustre/tests/conf-sanity.sh b/lustre/tests/conf-sanity.sh
index 92434d6..8cf00f3 100644
--- a/lustre/tests/conf-sanity.sh
+++ b/lustre/tests/conf-sanity.sh
@@ -2361,7 +2361,5 @@ if ! combined_mgs_mds ; then
     stop mgs
 fi
 
-equals_msg `basename $0`: test complete
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
-
-
+complete $(basename $0) $SECONDS
+exit_status
diff --git a/lustre/tests/insanity.sh b/lustre/tests/insanity.sh
index 69f074d..8f40d52 100755
--- a/lustre/tests/insanity.sh
+++ b/lustre/tests/insanity.sh
@@ -564,6 +564,6 @@ test_10() {
 }
 run_test 10 "Running Availability for 6 hours..."
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/large-scale.sh b/lustre/tests/large-scale.sh
index ddd1c28..51b8777 100644
--- a/lustre/tests/large-scale.sh
+++ b/lustre/tests/large-scale.sh
@@ -252,6 +252,6 @@ test_3a() {
 
 run_test 3a "recovery time, $CLIENTCOUNT clients"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG || true
+exit_status
diff --git a/lustre/tests/lfscktest.sh b/lustre/tests/lfscktest.sh
index 7d17a39..b23559d 100755
--- a/lustre/tests/lfscktest.sh
+++ b/lustre/tests/lfscktest.sh
@@ -277,11 +277,8 @@ else
     fi
 fi
 
-equals_msg $(basename $0): test complete, cleaning up
-
 LFSCK_ALWAYS=no
 
-check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && \
-    grep -q FAIL $TESTSUITELOG && exit 1 || true
-echo "$0: completed"
+complete $(basename $0) $SECONDS
+check_and_cleanup_lustre
+exit_status
diff --git a/lustre/tests/lnet-selftest.sh b/lustre/tests/lnet-selftest.sh
index f95713d..f4dd5b3 100755
--- a/lustre/tests/lnet-selftest.sh
+++ b/lustre/tests/lnet-selftest.sh
@@ -153,9 +153,8 @@ test_smoke () {
 }
 run_test smoke "lst regression test"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 if [ "$RESTORE_MOUNT" = yes ]; then
     setupall
 fi
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
-
+exit_status
diff --git a/lustre/tests/metadata-updates.sh b/lustre/tests/metadata-updates.sh
index cca2c64..9ef46ee 100755
--- a/lustre/tests/metadata-updates.sh
+++ b/lustre/tests/metadata-updates.sh
@@ -259,8 +259,8 @@ else
     skip_env "$0 : write_disjoint not found "
 fi
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 rm -rf $TESTDIR
 rm -f $MACHINEFILE
 check_and_cleanup_lustre
-exit $STATUS
+exit_status
diff --git a/lustre/tests/mmp.sh b/lustre/tests/mmp.sh
index 3c5ea9d..884f3a6 100755
--- a/lustre/tests/mmp.sh
+++ b/lustre/tests/mmp.sh
@@ -622,8 +622,6 @@ run_test 10 "e2fsck with mounted filesystem"
 
 mmp_fini
 FAIL_ON_ERROR=$SAVED_FAIL_ON_ERROR
-equals_msg $(basename $0): test complete
+complete $(basename $0) $SECONDS
 $MMP_RESTORE_MOUNT && setupall
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && \
-    grep -q FAIL $TESTSUITELOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/obdfilter-survey.sh b/lustre/tests/obdfilter-survey.sh
index 5b0cc38..ccdccdf 100644
--- a/lustre/tests/obdfilter-survey.sh
+++ b/lustre/tests/obdfilter-survey.sh
@@ -194,7 +194,7 @@ test_3a () {
 }
 run_test 3a "Network survey"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 cleanup_echo_devs
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/ost-pools.sh b/lustre/tests/ost-pools.sh
index b1cbe56..c47dd3e 100644
--- a/lustre/tests/ost-pools.sh
+++ b/lustre/tests/ost-pools.sh
@@ -31,8 +31,6 @@ check_and_setup_lustre
 DIR=${DIR:-$MOUNT}
 assert_DIR
 
-POOLSLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
-[ "$POOLSLOG" ] && rm -f $POOLSLOG || true
 build_test_filter
 
 FAIL_ON_ERROR=${FAIL_ON_ERROR:-true}
@@ -1350,10 +1348,9 @@ test_25() {
 }
 run_test 25 "Create new pool and restart MDS ======================="
 
-log "cleanup: ======================================================"
 cd $ORIG_PWD
+
+complete $(basename $0) $SECONDS
 cleanup_pools $FSNAME
 check_and_cleanup_lustre
-echo '=========================== finished ==============================='
-[ -f "$POOLSLOG" ] && cat $POOLSLOG && grep -q FAIL $POOLSLOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/parallel-scale.sh b/lustre/tests/parallel-scale.sh
index 2f06960..061db6d 100644
--- a/lustre/tests/parallel-scale.sh
+++ b/lustre/tests/parallel-scale.sh
@@ -615,6 +615,6 @@ test_statahead () {
 
 run_test statahead "statahead test, multiple clients"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/performance-sanity.sh b/lustre/tests/performance-sanity.sh
index 1846fd8..918b891 100644
--- a/lustre/tests/performance-sanity.sh
+++ b/lustre/tests/performance-sanity.sh
@@ -76,7 +76,7 @@ test_8() {
 }
 run_test 8 "getattr large files ======"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
 [ -f "$LOG" ] && cat $LOG || true
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/recovery-small.sh b/lustre/tests/recovery-small.sh
index 5c18236..0897f01 100755
--- a/lustre/tests/recovery-small.sh
+++ b/lustre/tests/recovery-small.sh
@@ -1052,6 +1052,6 @@ test_61()
 }
 run_test 61 "Verify to not reuse orphan objects - bug 17485"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-dual.sh b/lustre/tests/replay-dual.sh
index f814369..449c4ab 100755
--- a/lustre/tests/replay-dual.sh
+++ b/lustre/tests/replay-dual.sh
@@ -452,9 +452,9 @@ test_22() { #bug 18927
 }
 run_test 22 "double open|creat in replay with open orphan from two mntp"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 SLEEP=$((`date +%s` - $NOW))
 [ $SLEEP -lt $TIMEOUT ] && sleep $SLEEP
 [ "$MOUNTED2" = yes ] && zconf_umount $HOSTNAME $MOUNT2 || true
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-ost-single.sh b/lustre/tests/replay-ost-single.sh
index 98c9f7d..563a27a 100755
--- a/lustre/tests/replay-ost-single.sh
+++ b/lustre/tests/replay-ost-single.sh
@@ -230,6 +230,6 @@ test_7() {
 }
 run_test 7 "Fail OST before obd_destroy"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-single.sh b/lustre/tests/replay-single.sh
index 1b6686c..e9aeb50 100644
--- a/lustre/tests/replay-single.sh
+++ b/lustre/tests/replay-single.sh
@@ -2268,6 +2268,6 @@ test_87() { #bug 17485
 }
 run_test 87 "MDS should not assign same objid to different files "
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-vbr.sh b/lustre/tests/replay-vbr.sh
index 36c7c66..3cd1511 100644
--- a/lustre/tests/replay-vbr.sh
+++ b/lustre/tests/replay-vbr.sh
@@ -1288,8 +1288,6 @@ run_test 10 "mds version recovery; $CLIENTCOUNT clients"
 
 [ "$CLIENTS" ] && zconf_mount_clients $CLIENTS $DIR
 
-equals_msg `basename $0`: test complete, cleaning up
-#SLEEP=$((`date +%s` - $NOW))
-#[ $SLEEP -lt $TIMEOUT ] && sleep $SLEEP
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/runracer b/lustre/tests/runracer
index b67a819..d81d4b1 100644
--- a/lustre/tests/runracer
+++ b/lustre/tests/runracer
@@ -145,5 +145,5 @@ for rpid in $RACERPID; do
 done
 
 racer_cleanup
-echo "$0: completed $RC"
+complete $(basename $0) $SECONDS
 exit $RC
diff --git a/lustre/tests/runtests b/lustre/tests/runtests
index 6c2c9d1..f99f69f 100755
--- a/lustre/tests/runtests
+++ b/lustre/tests/runtests
@@ -123,5 +123,7 @@ if [ `expr $NOWUSED - $USED` -gt 1024 ]; then
     echo "Space not all freed: now ${NOWUSED}kB, was ${USED}kB." 1>&2
 fi
 
+complete $(basename $0) $SECONDS
 rm -f $FILES
 check_and_cleanup_lustre
+exit_status
diff --git a/lustre/tests/sanity-benchmark.sh b/lustre/tests/sanity-benchmark.sh
index 5484a8d..1f12b76 100644
--- a/lustre/tests/sanity-benchmark.sh
+++ b/lustre/tests/sanity-benchmark.sh
@@ -331,6 +331,6 @@ test_pios_fpp() {
 }
 run_test pios_fpp "pios file per process"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/sanity-quota.sh b/lustre/tests/sanity-quota.sh
index 03b2e74..0dc438f 100755
--- a/lustre/tests/sanity-quota.sh
+++ b/lustre/tests/sanity-quota.sh
@@ -2373,10 +2373,7 @@ test_99()
 }
 run_test_with_stat 99 "Quota off ==============================="
 
-
-log "cleanup: ======================================================"
 cd $ORIG_PWD
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-echo '=========================== finished ==============================='
-[ -f "$QUOTALOG" ] && cat $QUOTALOG && grep -q FAIL $QUOTALOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/sanity.sh b/lustre/tests/sanity.sh
index 6da0966..b068b15 100644
--- a/lustre/tests/sanity.sh
+++ b/lustre/tests/sanity.sh
@@ -68,7 +68,6 @@ init_test_env $@
 
 [ "$SLOW" = "no" ] && EXCEPT_SLOW="24o 27m 36f 36g 36h 51b 51c 60c 63 64b 68 71 73 77f 78 101 103 115 120g 124b"
 
-SANITYLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
 FAIL_ON_ERROR=${FAIL_ON_ERROR:-false}
 
 cleanup() {
@@ -101,8 +100,6 @@ if [ "$ONLY" == "cleanup" ]; then
     exit 0
 fi
 
-[ "$SANITYLOG" ] && rm -f $SANITYLOG || true
-
 check_and_setup_lustre
 
 DIR=${DIR:-$MOUNT}
@@ -6922,12 +6919,9 @@ test_900() {
 }
 run_test 900 "umount should not race with any mgc requeue thread"
 
-log "cleanup: ======================================================"
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
 if [ "$I_MOUNTED" != "yes" ]; then
     lctl set_param debug="$OLDDEBUG" 2> /dev/null || true
 fi
-
-echo '=========================== finished ==============================='
-[ -f "$SANITYLOG" ] && cat $SANITYLOG && grep -q FAIL $SANITYLOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/sanityN.sh b/lustre/tests/sanityN.sh
index b15482c..f0521bb 100644
--- a/lustre/tests/sanityN.sh
+++ b/lustre/tests/sanityN.sh
@@ -41,14 +41,11 @@ init_test_env $@
 
 [ "$SLOW" = "no" ] && EXCEPT_SLOW="12 16"
 
-SANITYLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
 FAIL_ON_ERROR=${FAIL_ON_ERROR:-false}
 
 SETUP=${SETUP:-:}
 TRACE=${TRACE:-""}
 
-[ "$SANITYLOG" ] && rm -f $SANITYLOG || true
-
 check_and_setup_lustre
 
 LOVNAME=`lctl get_param -n llite.*.lov.common_name | tail -n 1`
@@ -917,11 +914,6 @@ test_39() {
 }
 run_test 39 "direct I/O writes should update mtime ========="
 
-log "cleanup: ======================================================"
-
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-
-echo '=========================== finished ==============================='
-[ -f "$SANITYLOG" ] && cat $SANITYLOG && grep -q FAIL $SANITYLOG && exit 1 || true
-echo "$0: completed"
-
+exit_status
diff --git a/lustre/tests/sgpdd-survey.sh b/lustre/tests/sgpdd-survey.sh
index a9720e6..baf4bb9 100644
--- a/lustre/tests/sgpdd-survey.sh
+++ b/lustre/tests/sgpdd-survey.sh
@@ -72,5 +72,5 @@ test_2 () {
 }
 run_test 2 "sgpdd-survey, osts, scsidevs"
 
-equals_msg `basename $0`: test complete, cleaning up
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG || true
+complete $(basename $0) $SECONDS
+exit_status
diff --git a/lustre/tests/test-framework.sh b/lustre/tests/test-framework.sh
index b8be03e..529e123 100644
--- a/lustre/tests/test-framework.sh
+++ b/lustre/tests/test-framework.sh
@@ -49,23 +49,37 @@ usage() {
 print_summary () {
     trap 0
     [ "$TESTSUITE" == "lfscktest" ] && return 0
-    [ -n "$ONLY" ] && echo "WARNING: ONLY is set to ${ONLY}."
-    local form="%-13s %-17s %s\n"
-    printf "$form" "status" "script" "skipped tests E(xcluded) S(low)"
+    [ -n "$ONLY" ] && echo "WARNING: ONLY is set to $(echo $ONLY)"
+    local details
+    local form="%-13s %-17s %-9s %s %s\n"
+    printf "$form" "status" "script" "Total(sec)" "E(xcluded) S(low)"
     echo "------------------------------------------------------------------------------------"
     for O in $TESTSUITE_LIST; do
-        local skipped=""
-        local slow=""
+        [ "${!O}" = "no" ] && continue || true
         local o=$(echo $O | tr "[:upper:]" "[:lower:]")
         o=${o//_/-}
         o=${o//tyn/tyN}
         local log=${TMP}/${o}.log
-        [ -f $log ] && skipped=$(grep excluded $log | awk '{ printf " %s", $3 }' | sed 's/test_//g')
-        [ -f $log ] && slow=$(grep SLOW $log | awk '{ printf " %s", $3 }' | sed 's/test_//g')
-        [ "${!O}" = "done" ] && \
-            printf "$form" "Done" "$O" "E=$skipped" && \
-            [ -n "$slow" ] && printf "$form" "-" "-" "S=$slow"
-
+        [ "$o" = lfsck ] && log=${TMP}/lfscktest.log
+        [ "$o" = racer ] && log=${TMP}/runracer.log
+        local slow=
+        local skipped=
+        local total=
+        local status=Unfinished
+        if [ -f $log ]; then
+            skipped=$(grep excluded $log | awk '{ printf " %s", $3 }' | sed 's/test_//g')
+            slow=$(egrep "^PASS|^FAIL" $log | tr -d "("| sed s/s\)$//g | sort -nr -k 3 | head -5 | awk '{ print $2":"$3"s" }')
+            total=$(grep duration $log | awk '{ print $2}')
+            if [ "${!O}" = "done" ]; then
+                status=Done
+            fi
+            if $DDETAILS; then
+                local durations=$(egrep "^PASS|^FAIL" $log | tr -d "("| sed s/s\)$//g | awk '{ print $2":"$3"|" }')
+                details=$(printf "%s\n%s %s %s\n" "$details" "DDETAILS" "$O" "$(echo $durations)")
+            fi
+        fi
+        printf "$form" $status "$O" "${total}" "E=$skipped"
+        printf "$form" "-" "-" "-" "S=$(echo $slow)"
     done
 
     for O in $TESTSUITE_LIST; do
@@ -81,10 +95,10 @@ print_summary () {
         fi
     done
 
-    for O in $TESTSUITE_LIST; do
-        [ "${!O}" = "done" -o "${!O}" = "no" ] || \
-            printf "$form" "UNFINISHED" "$O" ""
-    done
+    # print the detailed tests durations if DDETAILS=true
+    if $DDETAILS; then
+        echo "$details"
+    fi
 }
 
 init_test_env() {
@@ -195,6 +209,8 @@ init_test_env() {
     shift $((OPTIND - 1))
     ONLY=${ONLY:-$*}
 
+    # print the durations of each test if "true"
+    DDETAILS=${DDETAILS:-false}
     [ "$TESTSUITELOG" ] && rm -f $TESTSUITELOG || true
     rm -f $TMP/*active
 }
@@ -2580,6 +2596,14 @@ error_noexit() {
     TEST_FAILED=true
 }
 
+exit_status () {
+    local status=0
+    local log=$TESTSUITELOG
+
+    [ -f "$log" ] && grep -q FAIL $log && status=1
+    exit $status
+}
+
 error() {
     error_noexit "$@"
     if $FAIL_ON_ERROR; then
@@ -2730,9 +2754,16 @@ trace() {
     return 1
 }
 
+complete () {
+    equals_msg $1 test complete, duration $2 sec
+    [ -f "$TESTSUITELOG" ] && egrep .FAIL $TESTSUITELOG || true
+    echo duration $2 >>$TESTSUITELOG
+}
+
 pass() {
-    $TEST_FAILED && echo -n "FAIL " || echo -n "PASS "
-    echo $@
+    local status=PASS
+    $TEST_FAILED && status=FAIL
+    echo "$status $testnum $@" 2>&1 | tee -a $TESTSUITELOG
 }
 
 check_mds() {
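
Note on the resulting pattern (a minimal sketch, not part of the patch): every converted suite above now ends with the same footer built from the complete() and exit_status() helpers added to test-framework.sh and bash's built-in $SECONDS elapsed-time counter. The check_and_cleanup_lustre call is suite-specific and is shown here only where a suite already used it.

    #!/bin/bash
    # Illustrative footer only; real suites source test-framework.sh,
    # which defines complete() and exit_status() as shown in the hunks above.

    # ... run_test invocations for the suite go here ...

    complete $(basename $0) $SECONDS   # banner "<script> test complete, duration N sec",
                                       # echoes FAIL lines from $TESTSUITELOG and appends
                                       # "duration N" to it for the acc-sm summary
    check_and_cleanup_lustre           # suite-specific cleanup, where applicable
    exit_status                        # exit 1 if $TESTSUITELOG contains FAIL, else 0

print_summary() picks the per-test times out of the PASS/FAIL lines that pass() now tees into the suite log and reports the recorded "duration" value in the new Total(sec) column; setting DDETAILS=true (default false, introduced by this patch) additionally prints the per-test durations.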