From 8c4f96f910786ff3d73474ef5f8d4a96a30a0bed Mon Sep 17 00:00:00 2001
From: Elena Gryaznova
Date: Mon, 20 Dec 2010 21:41:36 +0300
Subject: [PATCH] b=23051 improve summary of acc-sm to include test times

i=Andreas.Dilger
---
 lustre/tests/acceptance-small.sh   | 21 +++------
 lustre/tests/conf-sanity.sh        |  6 ++-
 lustre/tests/insanity.sh           |  4 +-
 lustre/tests/large-scale.sh        |  4 +-
 lustre/tests/lfsck.sh              |  9 ++--
 lustre/tests/liblustre.sh          |  4 +-
 lustre/tests/lnet-selftest.sh      |  5 +--
 lustre/tests/lustre-rsync-test.sh  | 10 +----
 lustre/tests/metadata-updates.sh   |  4 +-
 lustre/tests/mmp.sh                |  6 +--
 lustre/tests/obdfilter-survey.sh   |  4 +-
 lustre/tests/ost-pools.sh          |  9 ++--
 lustre/tests/parallel-scale.sh     |  4 +-
 lustre/tests/performance-sanity.sh |  4 +-
 lustre/tests/recovery-small.sh     |  4 +-
 lustre/tests/replay-dual.sh        |  4 +-
 lustre/tests/replay-ost-single.sh  |  4 +-
 lustre/tests/replay-single.sh      |  4 +-
 lustre/tests/replay-vbr.sh         |  4 +-
 lustre/tests/runracer              |  4 +-
 lustre/tests/runtests              |  2 +
 lustre/tests/sanity-benchmark.sh   |  4 +-
 lustre/tests/sanity-gss.sh         |  4 +-
 lustre/tests/sanity-quota.sh       |  6 +--
 lustre/tests/sanity-sec.sh         |  7 +--
 lustre/tests/sanity.sh             | 10 +----
 lustre/tests/sanityn.sh            | 10 ++---
 lustre/tests/sgpdd-survey.sh       |  4 +-
 lustre/tests/test-framework.sh     | 90 ++++++++++++++++++++++++++------------
 29 files changed, 126 insertions(+), 129 deletions(-)

diff --git a/lustre/tests/acceptance-small.sh b/lustre/tests/acceptance-small.sh
index 67b4e40..0709852 100755
--- a/lustre/tests/acceptance-small.sh
+++ b/lustre/tests/acceptance-small.sh
@@ -10,11 +10,11 @@ export OSKIPPED=0
 
 # This is the default set of tests to run.
 DEFAULT_SUITES="runtests sanity sanity-benchmark sanityn lfsck liblustre
                runracer replay-single conf-sanity recovery-small
-               replay-ost-single replay-dual insanity sanity-quota sanity-sec
-               sanity-gss performance-sanity large-scale recovery-mds-scale
-               recovery-double-scale recovery-random-scale parallel-scale
-               lustre_rsync-test metadata-updates ost-pools lnet-selftest
-               mmp obdfilter-survey sgpdd-survey"
+               replay-ost-single replay-dual replay-vbr insanity sanity-quota
+               sanity-sec sanity-gss performance-sanity large-scale
+               recovery-mds-scale recovery-double-scale recovery-random-scale
+               parallel-scale lustre_rsync-test metadata-updates ost-pools
+               lnet-selftest mmp obdfilter-survey sgpdd-survey"
 
 if [[ -n $@ ]]; then
     ACC_SM_ONLY="${ACC_SM_ONLY} $@"
@@ -109,17 +109,6 @@ title() {
     log "-----============= acceptance-small: "$*" ============----- $(date)"
 }
 
-is_sanity_benchmark() {
-    local benchmarks="dbench bonnie iozone fsx"
-    local suite=$1
-    for b in $benchmarks; do
-        if [ "$b" == "$suite" ]; then
-            return 0
-        fi
-    done
-    return 1
-}
-
 run_suite() {
     local suite_name=$(echo ${1%.sh} | tr "[:upper:]_" "[:lower:]-" )
     local suite=$(echo ${suite_name} | tr "[:lower:]-" "[:upper:]_")
diff --git a/lustre/tests/conf-sanity.sh b/lustre/tests/conf-sanity.sh
index 6d9c9b0..c852868 100644
--- a/lustre/tests/conf-sanity.sh
+++ b/lustre/tests/conf-sanity.sh
@@ -2703,6 +2703,8 @@ run_test 58 "missing llog files must not prevent MDT from mounting"
 if ! combined_mgs_mds ; then
     stop mgs
 fi
+cleanup_gss
 
-equals_msg `basename $0`: test complete
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+
+complete $(basename $0) $SECONDS
+exit_status
diff --git a/lustre/tests/insanity.sh b/lustre/tests/insanity.sh
index 6ceecf8..f0cb403 100755
--- a/lustre/tests/insanity.sh
+++ b/lustre/tests/insanity.sh
@@ -557,6 +557,6 @@ test_10() {
 }
 run_test 10 "Running Availability for 6 hours..."
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/large-scale.sh b/lustre/tests/large-scale.sh
index f97fba9..571f1ec 100644
--- a/lustre/tests/large-scale.sh
+++ b/lustre/tests/large-scale.sh
@@ -117,6 +117,6 @@ test_3a() {
 
 run_test 3a "recovery time, $CLIENTCOUNT clients"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG || true
+exit_status
diff --git a/lustre/tests/lfsck.sh b/lustre/tests/lfsck.sh
index 53a2ca0..04f3d56 100644
--- a/lustre/tests/lfsck.sh
+++ b/lustre/tests/lfsck.sh
@@ -279,11 +279,8 @@ else
     fi
 fi
 
-equals_msg $(basename $0): test complete, cleaning up
-
 LFSCK_ALWAYS=no
 
-check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && \
-    grep -q FAIL $TESTSUITELOG && exit 1 || true
-echo "$0: completed"
+complete $(basename $0) $SECONDS
+check_and_cleanup_lustre
+exit_status
diff --git a/lustre/tests/liblustre.sh b/lustre/tests/liblustre.sh
index 638e198..0ad8c35 100644
--- a/lustre/tests/liblustre.sh
+++ b/lustre/tests/liblustre.sh
@@ -40,6 +40,6 @@ test_1() {
 }
 run_test 1 "liblustre sanity"
 
-banner "test complete, cleaning up"
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/lnet-selftest.sh b/lustre/tests/lnet-selftest.sh
index c34a7b6..06d3b57 100755
--- a/lustre/tests/lnet-selftest.sh
+++ b/lustre/tests/lnet-selftest.sh
@@ -153,9 +153,8 @@ test_smoke () {
 }
 run_test smoke "lst regression test"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 if [ "$RESTORE_MOUNT" = yes ]; then
     setupall
 fi
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
-
+exit_status
diff --git a/lustre/tests/lustre-rsync-test.sh b/lustre/tests/lustre-rsync-test.sh
index 6566850..3064507 100644
--- a/lustre/tests/lustre-rsync-test.sh
+++ b/lustre/tests/lustre-rsync-test.sh
@@ -30,10 +30,6 @@ init_test_env $@
 . ${CONFIG:=$LUSTRE/tests/cfg/$NAME.sh}
 init_logging
 
-REPLLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
-
-[ "$REPLLOG" ] && rm -f $REPLLOG || true
-
 check_and_setup_lustre
 
 DIR=${DIR:-$MOUNT}
@@ -585,9 +581,7 @@ test_9() {
 }
 run_test 9 "Replicate recursive directory removal"
 
-log "cleanup: ======================================================"
 cd $ORIG_PWD
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-echo '=========================== finished ==============================='
-[ -f "$REPLLOG" ] && cat $REPLLOG && grep -q FAIL $REPLLOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/metadata-updates.sh b/lustre/tests/metadata-updates.sh
index 0e9f0eb..0228c2b 100755
--- a/lustre/tests/metadata-updates.sh
+++ b/lustre/tests/metadata-updates.sh
@@ -260,8 +260,8 @@ else
     skip_env "$0 : write_disjoint not found "
 fi
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 rm -rf $TESTDIR
 rm -f $MACHINEFILE
 check_and_cleanup_lustre
-exit $STATUS
+exit_status
diff --git a/lustre/tests/mmp.sh b/lustre/tests/mmp.sh
index 1749673..d4243f8 100755
--- a/lustre/tests/mmp.sh
+++ b/lustre/tests/mmp.sh
@@ -626,8 +626,6 @@ run_test 10 "e2fsck with mounted filesystem"
 mmp_fini
 FAIL_ON_ERROR=$SAVED_FAIL_ON_ERROR
 
-equals_msg $(basename $0): test complete
+complete $(basename $0) $SECONDS
 $MMP_RESTORE_MOUNT && setupall
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && \
-    grep -q FAIL $TESTSUITELOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/obdfilter-survey.sh b/lustre/tests/obdfilter-survey.sh
index 66ec822..f5cd00d 100644
--- a/lustre/tests/obdfilter-survey.sh
+++ b/lustre/tests/obdfilter-survey.sh
@@ -187,7 +187,7 @@ test_3a () {
 }
 run_test 3a "Network survey"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 cleanup_echo_devs
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/ost-pools.sh b/lustre/tests/ost-pools.sh
index 11404d0..edd34e0 100644
--- a/lustre/tests/ost-pools.sh
+++ b/lustre/tests/ost-pools.sh
@@ -32,8 +32,6 @@ check_and_setup_lustre
 DIR=${DIR:-$MOUNT}
 assert_DIR
 
-POOLSLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
-[ "$POOLSLOG" ] && rm -f $POOLSLOG || true
 build_test_filter
 
 FAIL_ON_ERROR=${FAIL_ON_ERROR:-true}
@@ -1395,10 +1393,9 @@ test_26() {
 }
 run_test 26 "Choose other OSTs in the pool first in the creation remedy"
 
-log "cleanup: ======================================================"
 cd $ORIG_PWD
+
+complete $(basename $0) $SECONDS
 cleanup_pools $FSNAME
 check_and_cleanup_lustre
-echo '=========================== finished ==============================='
-[ -f "$POOLSLOG" ] && cat $POOLSLOG && grep -q FAIL $POOLSLOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/parallel-scale.sh b/lustre/tests/parallel-scale.sh
index bc208a6..b64b78c 100644
--- a/lustre/tests/parallel-scale.sh
+++ b/lustre/tests/parallel-scale.sh
@@ -620,6 +620,6 @@ test_statahead () {
 
 run_test statahead "statahead test, multiple clients"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/performance-sanity.sh b/lustre/tests/performance-sanity.sh
index cae7b00..fd13747 100644
--- a/lustre/tests/performance-sanity.sh
+++ b/lustre/tests/performance-sanity.sh
@@ -81,7 +81,7 @@ test_8() {
 }
 run_test 8 "getattr large files ======"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
 [ -f "$LOG" ] && cat $LOG || true
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/recovery-small.sh b/lustre/tests/recovery-small.sh
index cb75978..2f3e767 100755
--- a/lustre/tests/recovery-small.sh
+++ b/lustre/tests/recovery-small.sh
@@ -1097,6 +1097,6 @@ test_61()
 }
 run_test 61 "Verify to not reuse orphan objects - bug 17025"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-dual.sh b/lustre/tests/replay-dual.sh
index ece5b6a..e101d7c 100755
--- a/lustre/tests/replay-dual.sh
+++ b/lustre/tests/replay-dual.sh
@@ -549,9 +549,9 @@ run_test 21b "commit on sharing, two clients"
 
 # end commit on sharing tests
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 SLEEP=$((`date +%s` - $NOW))
 [ $SLEEP -lt $TIMEOUT ] && sleep $SLEEP
 [ "$MOUNTED2" = yes ] && zconf_umount $HOSTNAME $MOUNT2 || true
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-ost-single.sh b/lustre/tests/replay-ost-single.sh
index 8f2c799..7a74166 100755
--- a/lustre/tests/replay-ost-single.sh
+++ b/lustre/tests/replay-ost-single.sh
@@ -231,6 +231,6 @@ test_7() {
 }
 run_test 7 "Fail OST before obd_destroy"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-single.sh b/lustre/tests/replay-single.sh
index 3cb5662..16e297d 100755
--- a/lustre/tests/replay-single.sh
+++ b/lustre/tests/replay-single.sh
@@ -2223,6 +2223,6 @@ test_89() {
 
 run_test 89 "no disk space leak on late ost connection"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/replay-vbr.sh b/lustre/tests/replay-vbr.sh
index 4202032..e1af41b 100644
--- a/lustre/tests/replay-vbr.sh
+++ b/lustre/tests/replay-vbr.sh
@@ -1132,6 +1132,6 @@ rm -f $cos_param_file
 
 [ "$CLIENTS" ] && zconf_mount_clients $CLIENTS $DIR
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/runracer b/lustre/tests/runracer
index 083a56e..344fed2 100644
--- a/lustre/tests/runracer
+++ b/lustre/tests/runracer
@@ -158,6 +158,6 @@ test_1() {
 }
 run_test 1 "racer on clients: $CLIENTS DURATION=$DURATION"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/runtests b/lustre/tests/runtests
index 2cc27e8..1416303 100755
--- a/lustre/tests/runtests
+++ b/lustre/tests/runtests
@@ -124,5 +124,7 @@ if [ `expr $NOWUSED - $USED` -gt 1024 ]; then
     echo "Space not all freed: now ${NOWUSED}kB, was ${USED}kB." 1>&2
 fi
 
+complete $(basename $0) $SECONDS
 rm -f $FILES
 check_and_cleanup_lustre
+exit_status
diff --git a/lustre/tests/sanity-benchmark.sh b/lustre/tests/sanity-benchmark.sh
index dbd4bac..c573af1 100644
--- a/lustre/tests/sanity-benchmark.sh
+++ b/lustre/tests/sanity-benchmark.sh
@@ -326,6 +326,6 @@ test_pios_fpp() {
 }
 run_test pios_fpp "pios file per process"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/sanity-gss.sh b/lustre/tests/sanity-gss.sh
index cf5685a..575dda4 100644
--- a/lustre/tests/sanity-gss.sh
+++ b/lustre/tests/sanity-gss.sh
@@ -793,6 +793,6 @@ test_151() {
 }
 run_test 151 "secure mgs connection: server flavor control"
 
-equals_msg `basename $0`: test complete, cleaning up
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG && grep -q FAIL $TESTSUITELOG && exit 1 || true
+exit_status
diff --git a/lustre/tests/sanity-quota.sh b/lustre/tests/sanity-quota.sh
index 335df41..cb0241c 100644
--- a/lustre/tests/sanity-quota.sh
+++ b/lustre/tests/sanity-quota.sh
@@ -2204,10 +2204,8 @@ quota_fini()
 }
 quota_fini
 
-log "cleanup: ======================================================"
 cd $ORIG_PWD
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
-echo '=========================== finished ==============================='
 export QUOTA_AUTO=$QUOTA_AUTO_OLD
-[ -f "$QUOTALOG" ] && cat $QUOTALOG && grep -q FAIL $QUOTALOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/sanity-sec.sh b/lustre/tests/sanity-sec.sh
index d592192..f320e41 100644
--- a/lustre/tests/sanity-sec.sh
+++ b/lustre/tests/sanity-sec.sh
@@ -30,7 +30,6 @@ WTL=${WTL:-"$LUSTRE/tests/write_time_limit"}
 
 CONFDIR=/etc/lustre
 PERM_CONF=$CONFDIR/perm.conf
-SANITYSECLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
 FAIL_ON_ERROR=false
 
 require_dsh_mds || exit 0
@@ -573,7 +572,5 @@ sec_unsetup
 
 sec_cleanup
 
-echo '=========================== finished ==============================='
-[ -f "$SANITYSECLOG" ] && \
-    cat $SANITYSECLOG && grep -q FAIL $SANITYSECLOG && exit 1 || true
-echo "$0 completed"
+complete $(basename $0) $SECONDS
+exit_status
diff --git a/lustre/tests/sanity.sh b/lustre/tests/sanity.sh
index 4074a4f..8c80eac 100644
--- a/lustre/tests/sanity.sh
+++ b/lustre/tests/sanity.sh
@@ -72,7 +72,6 @@ init_logging
 
 [ "$SLOW" = "no" ] && EXCEPT_SLOW="24o 24v 27m 36f 36g 36h 51b 51c 60c 63 64b 68 71 73 77f 78 101 103 115 120g 124b"
 
-SANITYLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
 FAIL_ON_ERROR=false
 
 cleanup() {
@@ -103,8 +102,6 @@ if [ "$ONLY" == "cleanup" ]; then
     exit 0
 fi
 
-[ "$SANITYLOG" ] && rm -f $SANITYLOG || true
-
 check_and_setup_lustre
 
 DIR=${DIR:-$MOUNT}
@@ -7793,12 +7790,9 @@ test_900() {
 }
 run_test 900 "umount should not race with any mgc requeue thread"
 
-log "cleanup: ======================================================"
+complete $(basename $0) $SECONDS
 check_and_cleanup_lustre
 if [ "$I_MOUNTED" != "yes" ]; then
     lctl set_param debug="$OLDDEBUG" 2> /dev/null || true
 fi
-
-echo '=========================== finished ==============================='
-[ -f "$SANITYLOG" ] && cat $SANITYLOG && grep -q FAIL $SANITYLOG && exit 1 || true
-echo "$0: completed"
+exit_status
diff --git a/lustre/tests/sanityn.sh b/lustre/tests/sanityn.sh
index 6f5236f..c8ae0c3 100644
--- a/lustre/tests/sanityn.sh
+++ b/lustre/tests/sanityn.sh
@@ -45,14 +45,11 @@ init_logging
 
 [ "$SLOW" = "no" ] && EXCEPT_SLOW="12 16 23 33a"
 
-SANITYLOG=${TESTSUITELOG:-$TMP/$(basename $0 .sh).log}
 FAIL_ON_ERROR=false
 
 SETUP=${SETUP:-:}
 TRACE=${TRACE:-""}
 
-[ "$SANITYLOG" ] && rm -f $SANITYLOG || true
-
 check_and_setup_lustre
 
 LOVNAME=`lctl get_param -n llite.*.lov.common_name | tail -n 1`
@@ -1876,8 +1873,7 @@
 run_test 50 "osc lvb attrs: enqueue vs. CP AST =============="
 
 log "cleanup: ======================================================"
 [ "$(mount | grep $MOUNT2)" ] && umount $MOUNT2
-check_and_cleanup_lustre
-echo '=========================== finished ==============================='
-[ -f "$SANITYLOG" ] && cat $SANITYLOG && grep -q FAIL $SANITYLOG && exit 1 || true
-echo "$0: completed"
+complete $(basename $0) $SECONDS
+check_and_cleanup_lustre
+exit_status
diff --git a/lustre/tests/sgpdd-survey.sh b/lustre/tests/sgpdd-survey.sh
index 7268cd8..3b27420 100644
--- a/lustre/tests/sgpdd-survey.sh
+++ b/lustre/tests/sgpdd-survey.sh
@@ -79,5 +79,5 @@ test_2 () {
 }
 run_test 2 "sgpdd-survey, osts, scsidevs"
 
-equals_msg `basename $0`: test complete, cleaning up
-[ -f "$TESTSUITELOG" ] && cat $TESTSUITELOG || true
+complete $(basename $0) $SECONDS
+exit_status
diff --git a/lustre/tests/test-framework.sh b/lustre/tests/test-framework.sh
index f77b467..89c76ae 100644
--- a/lustre/tests/test-framework.sh
+++ b/lustre/tests/test-framework.sh
@@ -58,44 +58,50 @@ usage() {
 print_summary () {
     trap 0
     [ "$TESTSUITE" == "lfsck" ] && return 0
-    [ -n "$ONLY" ] && echo "WARNING: ONLY is set to ${ONLY}."
-    local form="%-13s %-17s %s\n"
-    printf "$form" "status" "script" "skipped tests E(xcluded) S(low)"
+    [ -n "$ONLY" ] && echo "WARNING: ONLY is set to $(echo $ONLY)"
+    local details
+    local form="%-13s %-17s %-9s %s %s\n"
+    printf "$form" "status" "script" "Total(sec)" "E(xcluded) S(low)"
     echo "------------------------------------------------------------------------------------"
     for O in $DEFAULT_SUITES; do
-        local skipped=""
-        local slow=""
         O=$(echo $O | tr "-" "_" | tr "[:lower:]" "[:upper:]")
-        local o=$(echo $O | tr "[:upper:]" "[:lower:]")
-        o=${o//_/-}
+        [ "${!O}" = "no" ] && continue || true
+        local o=$(echo $O | tr "[:upper:]_" "[:lower:]-")
         local log=${TMP}/${o}.log
-        [ -f $log ] && skipped=$(grep excluded $log | awk '{ printf " %s", $3 }' | sed 's/test_//g')
-        [ -f $log ] && slow=$(grep SLOW $log | awk '{ printf " %s", $3 }' | sed 's/test_//g')
-        [ "${!O}" = "done" ] && \
-            printf "$form" "Done" "$O" "E=$skipped" && \
-            [ -n "$slow" ] && printf "$form" "-" "-" "S=$slow"
-
+        if is_sanity_benchmark $o; then
+            log=${TMP}/sanity-benchmark.log
+        fi
+        local slow=
+        local skipped=
+        local total=
+        local status=Unfinished
+        if [ -f $log ]; then
+            skipped=$(grep excluded $log | awk '{ printf " %s", $3 }' | sed 's/test_//g')
+            slow=$(egrep "^PASS|^FAIL" $log | tr -d "("| sed s/s\)$//g | sort -nr -k 3 | head -5 | awk '{ print $2":"$3"s" }')
+            total=$(grep duration $log | awk '{ print $2}')
+            if [ "${!O}" = "done" ]; then
+                status=Done
+            fi
+            if $DDETAILS; then
+                local durations=$(egrep "^PASS|^FAIL" $log | tr -d "("| sed s/s\)$//g | awk '{ print $2":"$3"|" }')
+                details=$(printf "%s\n%s %s %s\n" "$details" "DDETAILS" "$O" "$(echo $durations)")
+            fi
+        fi
+        printf "$form" $status "$O" "${total}" "E=$skipped"
+        printf "$form" "-" "-" "-" "S=$(echo $slow)"
     done
 
     for O in $DEFAULT_SUITES; do
        O=$(echo $O | tr "-" "_" | tr "[:lower:]" "[:upper:]")
        if [ "${!O}" = "no" ]; then
-            # FIXME.
-            # only for those tests suits which are run directly from acc-sm script:
-            # bonnie, iozone, etc.
-            if [ -f "$TESTSUITELOG" ] && grep FAIL $TESTSUITELOG | grep -q ' '$O ; then
-                printf "$form" "UNFINISHED" "$O" ""
-            else
-                printf "$form" "Skipped" "$O" ""
-            fi
+            printf "$form" "Skipped" "$O" ""
         fi
     done
 
-    for O in $DEFAULT_SUITES; do
-        O=$(echo $O | tr "-" "_" | tr "[:lower:]" "[:upper:]")
-        [ "${!O}" = "done" -o "${!O}" = "no" ] || \
-            printf "$form" "UNFINISHED" "$O" ""
-    done
+    # print the detailed tests durations if DDETAILS=true
+    if $DDETAILS; then
+        echo "$details"
+    fi
 }
@@ -246,6 +252,8 @@ init_test_env() {
     shift $((OPTIND - 1))
     ONLY=${ONLY:-$*}
+    # print the durations of each test if "true"
+    DDETAILS=${DDETAILS:-false}
 
     [ "$TESTSUITELOG" ] && rm -f $TESTSUITELOG || true
     rm -f $TMP/*active
 }
@@ -2887,6 +2895,14 @@ error_noexit() {
     echo "$@" > $LOGDIR/err
 }
 
+exit_status () {
+    local status=0
+    local log=$TESTSUITELOG
+
+    [ -f "$log" ] && grep -q FAIL $log && status=1
+    exit $status
+}
+
 error() {
     error_noexit "$@"
     exit 1
@@ -3037,6 +3053,12 @@ trace() {
     return 1
 }
 
+complete () {
+    equals_msg $1 test complete, duration $2 sec
+    [ -f "$TESTSUITELOG" ] && egrep .FAIL $TESTSUITELOG || true
+    echo duration $2 >>$TESTSUITELOG
+}
+
 pass() {
     # Set TEST_STATUS here; will be used for logging the result
     if [ -f $LOGDIR/err ]; then
@@ -3044,7 +3066,7 @@ pass() {
     else
         TEST_STATUS="PASS"
     fi
-    echo $TEST_STATUS " " $@
+    echo "$TEST_STATUS $@" 2>&1 | tee -a $TESTSUITELOG
 }
 
 check_mds() {
@@ -3125,7 +3147,7 @@ run_one_logged() {
     echo "test_$1 returned $RC" | tee $LOGDIR/err
 
     duration=$((`date +%s` - $BEFORE))
-    pass "(${duration}s)"
+    pass "$1" "(${duration}s)"
     [ -f $LOGDIR/err ] && TEST_ERROR=$(cat $LOGDIR/err)
     log_sub_test_end $TEST_STATUS $duration "$RC" "$TEST_ERROR"
 
@@ -4475,3 +4497,15 @@ else
         echo \\\$(basename \\\$dv);
     fi;"
 }
+
+is_sanity_benchmark() {
+    local benchmarks="dbench bonnie iozone fsx"
+    local suite=$1
+    for b in $benchmarks; do
+        if [ "$b" == "$suite" ]; then
+            return 0
+        fi
+    done
+    return 1
+}
+
-- 
1.8.3.1