| #!/bin/sh |
| ################################################################################ |
| ## ## |
| ## Copyright (c) International Business Machines Corp., 2001 ## |
| ## ## |
| ## This program is free software; you can redistribute it and/or modify ## |
| ## it under the terms of the GNU General Public License as published by ## |
| ## the Free Software Foundation; either version 2 of the License, or ## |
| ## (at your option) any later version. ## |
| ## ## |
| ## This program is distributed in the hope that it will be useful, but ## |
| ## WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY ## |
| ## or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License ## |
| ## for more details. ## |
| ## ## |
| ## You should have received a copy of the GNU General Public License ## |
| ## along with this program; if not, write to the Free Software ## |
| ## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA ## |
| ## ## |
| ################################################################################ |
| # File: runltp |
| # |
| # Description: This script can be used to run the tests in the LTP test suite |
| # |
| # Authors: Manoj Iyer - manjo@mail.utexas.edu |
| # Robbie Williamson - robbiew@us.ibm.com |
| # |
| # History: Oct 07 2003 - Modified - Manoj Iyer |
| # - use functions |
| # - clean up on script exit |
| # - error checking etc. |
| # |
| # Oct 08 2003 - Modified - Manoj Iyer |
| # - fixed bug in creating results directory |
| # - all checks should be enclosed in " " to avoid bash error |
| # - exit with error if ltp-pan is not found in pan directory |
| # |
| # Jul 22 2007 - Modified - Ricardo Salveti de Araujo |
| # - added support to put more than one file at CMDLINE (-f) |
| # - added a new option, that the user can pass the address of |
| # the command file, and it'll use wget to get it (-w) |
| # - now -s does the grep at the selected command files (default, |
| # -f or -w) |
| # |
| # Jul 23 2007 - Modified - Ricardo Salveti de Araujo |
| # - added flag to get the command file that has all failed tests |
| # |
| # Sep 11 2007 - Modified - Subrata Modak |
| # - added option to create Failed File if it is not an absolute path |
| # - added option to create Output File if it is not an absolute path |
| # - added option to always create a Failed File, even if the user has not requested one |
| # |
| # Sep 14 2007 - Modified - Ricardo Salveti de Araujo |
| # - cleaning and removing duplicated code |
| # |
| # Oct 27 2007 - Modified - Ricardo Salveti de Araujo and Subrata Modak |
| # - better ways to integrate "ltp/tools/genload/stress" with "ltp/runltp" |
| # Nov 24 2007 - Modified - Subrata Modak |
| # - Added a new option to also generate output in HTML format, while retaining |
| # the original test format |
| # Nov 28 2007 - Modified - Subrata Modak |
| # - Added a new option to mail back LTP reports |
| # May 19 2008 - Modified - Subrata Modak |
| # - Added capability for default Log file generation |
| # Aug 17 2009 - Modified - Subrata Modak |
| # - Added Fault Injection generation Capability through -F Option |
| # |
| ################################################################################# |
| |
| |
| |
| setup() |
| { |
| cd `dirname $0` || \ |
| { |
| echo "FATAL: unable to change directory to $(dirname $0)" |
| exit 1 |
| } |
| export LTPROOT=${PWD} |
| export TMPBASE="/tmp" |
| export PATH="${PATH}:${LTPROOT}/testcases/bin:${LTPROOT}/bin" |
| |
| export LTP_DEV_FS_TYPE="ext2" |
| |
| [ -d "$LTPROOT/testcases/bin" ] || |
| { |
| echo "FATAL: LTP not installed correctly" |
| echo "INFO: Follow directions in INSTALL!" |
| exit 1 |
| } |
| |
| [ -e "$LTPROOT/bin/ltp-pan" ] || |
| { |
| echo "FATAL: Test suite driver 'ltp-pan' not found" |
| echo "INFO: Follow directions in INSTALL!" |
| exit 1 |
| } |
| } |
| |
| version_of_ltp() |
| { |
| cat "$LTPROOT/Version" |
| exit 0 |
| } |
| |
| usage() |
| { |
| cat <<-EOF >&2 |
| |
| usage: ${0##*/} [ -a EMAIL_TO ] [ -c NUM_PROCS ] [ -C FAILCMDFILE ] [ -T TCONFCMDFILE ] |
| [ -d TMPDIR ] [ -D NUM_PROCS,NUM_FILES,NUM_BYTES,CLEAN_FLAG ] -e [ -f CMDFILES(,...) ] |
| [ -g HTMLFILE] [ -i NUM_PROCS ] [ -l LOGFILE ] [ -m NUM_PROCS,CHUNKS,BYTES,HANGUP_FLAG ] |
| -N -n [ -o OUTPUTFILE ] -p -q -Q [ -r LTPROOT ] [ -s PATTERN ] [ -t DURATION ] |
| -v [ -w CMDFILEADDR ] [ -x INSTANCES ] [ -b DEVICE ] [-B LTP_DEV_FS_TYPE] |
| [ -F LOOPS,PERCENTAGE ] [ -z BIG_DEVICE ] [-Z LTP_BIG_DEV_FS_TYPE] |
| |
| -a EMAIL_TO EMAIL all your Reports to this E-mail Address |
| -c NUM_PROCS Run LTP under additional background CPU load |
| [NUM_PROCS = no. of processes creating the CPU Load by spinning over sqrt() |
| (Defaults to 1 when value 0 or undefined)] |
| -C FAILCMDFILE Command file with all failed test cases. |
| -T TCONFCMDFILE Command file with all test cases that are not fully tested. |
| -d TMPDIR Directory where temporary files will be created. |
| -D NUM_PROCS,NUM_FILES,NUM_BYTES,CLEAN_FLAG |
| Run LTP under additional background Load on Secondary Storage (Separate by comma) |
| [NUM_PROCS = no. of processes creating Storage Load by spinning over write()] |
| [NUM_FILES = write() to this many files (Defaults to 1 when value 0 or undefined)] |
| [NUM_BYTES = write this many bytes (defaults to 1GB when value 0 or undefined)] |
| [CLEAN_FLAG = unlink file to which random data written, when value 1] |
| -e Prints the date of the current LTP release |
| -f CMDFILES Execute user defined list of testcases (separate with ',') |
| -F LOOPS,PERCENTAGE Induce PERCENTAGE Faults in the Kernel Subsystems, and run each test for LOOPS loops |
| -g HTMLFILE Create an additional HTML output format |
| -h Help. Prints all available options. |
| -i NUM_PROCS Run LTP under additional background Load on IO Bus |
| [NUM_PROCS = no. of processes creating IO Bus Load by spinning over sync()] |
| -K DMESG_LOG_DIR |
| Log Kernel messages generated for each test case inside this directory |
| -l LOGFILE Log results of test in a logfile. |
| -m NUM_PROCS,CHUNKS,BYTES,HANGUP_FLAG |
| Run LTP under additional background Load on Main memory (Separate by comma) |
| [NUM_PROCS = no. of processes creating main Memory Load by spinning over malloc()] |
| [CHUNKS = malloc this many chunks (default is 1 when value 0 or undefined)] |
| [BYTES = malloc CHUNKS of BYTES bytes (default is 256MB when value 0 or undefined) ] |
| [HANGUP_FLAG = hang in a sleep loop after memory allocated, when value 1] |
| -M CHECK_TYPE |
| [CHECK_TYPE=1 => Full Memory Leak Check tracing children as well] |
| [CHECK_TYPE=2 => Thread Concurrency Check tracing children as well] |
| [CHECK_TYPE=3 => Full Memory Leak & Thread Concurrency Check tracing children as well] |
| -N Run all the networking tests. |
| -o OUTPUTFILE Redirect test output to a file. |
| -p Human readable format logfiles. |
| -q Print less verbose output to screen. This implies |
| not logging start of the test in kernel log. |
| -Q Don't log start of test in kernel log. |
| -r LTPROOT Fully qualified path where testsuite is installed. |
| -R Randomize test order. |
| -s PATTERN Only run test cases which match PATTERN. |
| -S SKIPFILE Skip tests specified in SKIPFILE |
| -t DURATION Execute the testsuite for the given duration. Examples: |
| -t 60s = 60 seconds |
| -t 45m = 45 minutes |
| -t 24h = 24 hours |
| -t 2d = 2 days |
| -I ITERATIONS Execute the testsuite ITERATIONS times. |
| -w CMDFILEADDR Uses wget to get the user's list of testcases. |
| -x INSTANCES Run multiple instances of this testsuite. |
| -b DEVICE Some tests require an unmounted block device |
| to run correctly. |
| -B LTP_DEV_FS_TYPE The file system of test block devices. |
| -z BIG_DEVICE Some tests require a big unmounted block device |
| to run correctly. |
| -Z LTP_BIG_DEV_FS_TYPE The file system of the big device |
| |
| |
| |
| example: ${0##*/} -c 2 -i 2 -m 2,4,10240,1 -D 2,10,10240,1 -p -q -l /tmp/result-log.$$ -o /tmp/result-output.$$ -C /tmp/result-failed.$$ -d ${PWD} |
| |
| |
| EOF |
| exit 0 |
| } |
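| # Illustrative invocations (a sketch only; the runtest, skip and report file |
| # names below are placeholders, not something this script ships or checks): |
| # |
| #   ./runltp -f syscalls,commands                     # run two runtest files from $LTPROOT/runtest |
| #   ./runltp -f syscalls -s madvise                   # only tests whose line matches "madvise" |
| #   ./runltp -f syscalls -S skip.list -g report.html  # skip listed tests, also emit an HTML report |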
| |
| main() |
| { |
| local CMDFILES= |
| local PRETTY_PRT= |
| local ALT_DIR_OUT=0 |
| local ALT_DIR_RES=0 |
| local ALT_HTML_OUT=0 |
| local ALT_EMAIL_OUT=0 |
| local ALT_DMESG_OUT=0 |
| local RUN_NETEST=0 |
| local RUN_REPEATED=0 |
| local QUIET_MODE= |
| local NO_KMSG= |
| local NETPIPE=0 |
| local GENLOAD=0 |
| local MEMSIZE=0 |
| local DURATION= |
| local CMDFILEADDR= |
| local FAILCMDFILE= |
| local TCONFCMDFILE= |
| local INJECT_KERNEL_FAULT= |
| local INJECT_KERNEL_FAULT_PERCENTAGE= |
| local INJECT_FAULT_LOOPS_PER_TEST= |
| local VALGRIND_CHECK= |
| local VALGRIND_CHECK_TYPE= |
| local LOGFILE_NAME= |
| local LOGFILE= |
| local OUTPUTFILE_NAME= |
| local OUTPUTFILE= |
| local HTMLFILE_NAME= |
| local HTMLFILE= |
| local DMESG_DIR= |
| local EMAIL_TO= |
| local TAG_RESTRICT_STRING= |
| local PAN_COMMAND= |
| local RANDOMRUN=0 |
| local DEFAULT_FILE_NAME_GENERATION_TIME=`date +"%Y_%m_%d-%Hh_%Mm_%Ss"` |
| local scenfile= |
| |
| version_date=$(cat "$LTPROOT/Version") |
| |
| while getopts a:b:B:c:C:T:d:D:ef:F:g:hi:I:K:l:m:M:No:pqQr:Rs:S:t:T:w:x:z:Z: arg |
| do case $arg in |
| a) EMAIL_TO=$OPTARG |
| ALT_EMAIL_OUT=1;; |
| c) |
| NUM_PROCS=$(($OPTARG)) |
| if [ "$NUM_PROCS" -eq 0 ]; then |
| # The user did not define a value, or passed zero; |
| # default to 1 to avoid forking an unbounded number of processes |
| NUM_PROCS=1 |
| fi |
| $LTPROOT/testcases/bin/genload --cpu $NUM_PROCS >/dev/null 2>&1 & |
| GENLOAD=1 ;; |
| |
| C) |
| case $OPTARG in |
| /*) |
| FAILCMDFILE="-C $OPTARG" ;; |
| *) |
| FAILCMDFILE="-C $LTPROOT/output/$OPTARG" |
| ALT_DIR_OUT=1 ;; |
| esac ;; |
| |
| T) |
| case $OPTARG in |
| /*) |
| TCONFCMDFILE="-T $OPTARG" ;; |
| *) |
| TCONFCMDFILE="-T $LTPROOT/output/$OPTARG" |
| ALT_DIR_OUT=1 ;; |
| esac ;; |
| |
| d) # convert the user path to absolute path. |
| export TMPBASE=$(readlink -f ${OPTARG}) ;; |
| |
| D) NUM_PROCS=1; NUM_FILES=1; NUM_BYTES=$((1024 * 1024 * 1024)); CLEAN_FLAG=0 |
| ARGUMENT_LIST=$OPTARG |
| TOTAL_ARGUMENTS=1 |
| for ARGUMENT in `echo "$ARGUMENT_LIST" | tr ',' ' '` |
| do |
| case $TOTAL_ARGUMENTS in |
| 1) NUM_PROCS="$ARGUMENT" ;; |
| 2) NUM_FILES="$ARGUMENT" ;; |
| 3) NUM_BYTES="$ARGUMENT" ;; |
| 4) CLEAN_FLAG="$ARGUMENT" ;; |
| esac |
| TOTAL_ARGUMENTS=`expr $TOTAL_ARGUMENTS + 1` |
| done |
| # just to get the default values if the user passed 0 |
| if [ "$NUM_PROCS" -eq 0 ]; then |
| NUM_PROCS=1 |
| fi |
| if [ "$NUM_FILES" -eq 0 ]; then |
| NUM_FILES=1 |
| fi |
| if [ "$NUM_BYTES" -eq 0 ]; then |
| NUM_BYTES=$((1024 * 1024 * 1024)) |
| fi |
| if [ "$CLEAN_FLAG" -ne 1 ]; then |
| CLEAN_FLAG=0 |
| fi |
| if [ "$CLEAN_FLAG" -eq 1 ]; then |
| # CLEAN_FLAG=1: let genload unlink the files it writes (clean up) |
| $LTPROOT/testcases/bin/genload --hdd $NUM_PROCS --hdd-files \ |
| $NUM_FILES --hdd-bytes $NUM_BYTES >/dev/null 2>&1 & |
| else |
| # Otherwise keep the files around (--hdd-noclean) |
| $LTPROOT/testcases/bin/genload --hdd $NUM_PROCS --hdd-files \ |
| $NUM_FILES --hdd-bytes $NUM_BYTES --hdd-noclean >/dev/null 2>&1 & |
| fi |
| GENLOAD=1;; |
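| # Example (hypothetical values): "-D 2,4,1048576,1" is parsed as NUM_PROCS=2, |
| # NUM_FILES=4, NUM_BYTES=1048576, CLEAN_FLAG=1, and starts |
| # "genload --hdd 2 --hdd-files 4 --hdd-bytes 1048576" (files unlinked afterwards). |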
| |
| e) # Print out the version of LTP |
| version_of_ltp |
| ;; |
| f) # Execute user defined set of testcases. |
| # Can be more than one file, just separate it with ',', like: |
| # -f nfs,commands,/tmp/testfile |
| CMDFILES=$OPTARG;; |
| F) INJECT_KERNEL_FAULT=1 |
| # Separate out the NO_OF_LOOPS & FAULT_PERCENTAGE |
| INJECT_FAULT_LOOPS_PER_TEST=`echo $OPTARG |cut -d',' -f1 | tr -d '\n' | tr -d ' '` |
| INJECT_KERNEL_FAULT_PERCENTAGE=`echo $OPTARG |cut -d',' -f2 | tr -d '\n' | tr -d ' '` |
| if [ ! $INJECT_FAULT_LOOPS_PER_TEST ]; then |
| echo "Loops not properly defined. Resorting to default 5..." |
| export INJECT_FAULT_LOOPS_PER_TEST=5 |
| fi |
| if [ ! $INJECT_KERNEL_FAULT_PERCENTAGE ]; then |
| echo "Fault Persentage not properly defined. Resorting to default 10..." |
| export INJECT_KERNEL_FAULT_PERCENTAGE=10 |
| fi;; |
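| # Example (hypothetical values): "-F 5,10" requests 5 loops per test with a 10% |
| # fault-injection probability; empty fields fall back to the defaults handled above. |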
| g) HTMLFILE_NAME="$OPTARG" |
| case $OPTARG in |
| /*) |
| HTMLFILE="$OPTARG";; |
| *) |
| HTMLFILE="$LTPROOT/output/$OPTARG";; |
| esac |
| ALT_DIR_OUT=1 |
| ALT_HTML_OUT=1;; |
| h) usage;; |
| |
| i) |
| NUM_PROCS=$(($OPTARG)) |
| if [ "$NUM_PROCS" -eq 0 ]; then |
| # The user did not define a value, or passed zero; |
| # default to 1 to avoid forking an unbounded number of processes |
| NUM_PROCS=1 |
| fi |
| $LTPROOT/testcases/bin/genload --io $NUM_PROCS >/dev/null 2>&1 & |
| GENLOAD=1 ;; |
| |
| K) |
| case $OPTARG in |
| /*) |
| DMESG_DIR="$OPTARG-dmesg-output-`echo $$-``date +%X | tr -d ' '`";; |
| *) |
| DMESG_DIR="$LTPROOT/output/$OPTARG-dmesg-output-`echo $$-``date +%X | tr -d ' '`";; |
| esac |
| mkdir -p $DMESG_DIR |
| ALT_DMESG_OUT=1;; |
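| # Example (hypothetical name): "-K mylogs" stores per-test dmesg output under |
| # "$LTPROOT/output/mylogs-dmesg-output-<pid>-<time>", created by the mkdir above. |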
| l) |
| LOGFILE_NAME="$OPTARG" |
| case $OPTARG in |
| /*) |
| LOGFILE="-l $OPTARG" ;; |
| *) |
| LOGFILE="-l $LTPROOT/results/$OPTARG" |
| ALT_DIR_RES=1 ;; |
| esac ;; |
| |
| m) NUM_PROCS=1; CHUNKS=1; BYTES=$((256 * 1024 * 1024)); HANGUP_FLAG=0 |
| ARGUMENT_LIST=$OPTARG |
| TOTAL_ARGUMENTS=1 |
| for ARGUMENT in `echo "$ARGUMENT_LIST" | tr ',' ' '` |
| do |
| case $TOTAL_ARGUMENTS in |
| 1) NUM_PROCS="$ARGUMENT" ;; |
| 2) CHUNKS="$ARGUMENT" ;; |
| 3) BYTES="$ARGUMENT" ;; |
| 4) HANGUP_FLAG="$ARGUMENT" ;; |
| esac |
| TOTAL_ARGUMENTS=`expr $TOTAL_ARGUMENTS + 1` |
| done |
| # just to get the default values if the user passed 0 |
| if [ "$NUM_PROCS" -eq 0 ]; then |
| NUM_PROCS=1 |
| fi |
| if [ "$CHUNKS" -eq 0 ]; then |
| CHUNKS=1 |
| fi |
| if [ "$BYTES" -eq 0 ]; then |
| BYTES=$((256 * 1024 * 1024)) |
| fi |
| if [ "$HANGUP_FLAG" -ne 1 ]; then |
| HANGUP_FLAG=0 |
| fi |
| if [ "$HANGUP_FLAG" -eq 1 ]; then |
| # Hang in a Sleep loop after memory allocated |
| $LTPROOT/testcases/bin/genload --vm $NUM_PROCS --vm-chunks \ |
| $CHUNKS --vm-bytes $BYTES --vm-hang >/dev/null 2>&1 & |
| else |
| # Otherwise Do not Hangup |
| $LTPROOT/testcases/bin/genload --vm $NUM_PROCS --vm-chunks \ |
| $CHUNKS --vm-bytes $BYTES >/dev/null 2>&1 & |
| fi |
| GENLOAD=1;; |
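| # Example (hypothetical values): "-m 2,4,10240,1" is parsed as NUM_PROCS=2, |
| # CHUNKS=4, BYTES=10240, HANGUP_FLAG=1, i.e. "genload --vm 2 --vm-chunks 4 |
| # --vm-bytes 10240 --vm-hang" is started in the background. |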
| M) |
| VALGRIND_CHECK=1 |
| VALGRIND_CHECK_TYPE="$OPTARG";; |
| |
| N) RUN_NETEST=1;; |
| |
| o) OUTPUTFILE_NAME="$OPTARG" |
| case $OPTARG in |
| /*) |
| OUTPUTFILE="-o $OPTARG";; |
| *) |
| OUTPUTFILE="-o $LTPROOT/output/$OPTARG" |
| ALT_DIR_OUT=1 ;; |
| esac ;; |
| |
| p) PRETTY_PRT="-p";; |
| |
| q) QUIET_MODE="-q";; |
| |
| Q) NO_KMSG="-Q";; |
| |
| r) LTPROOT=$OPTARG;; |
| |
| R) RANDOMRUN=1;; |
| |
| s) TAG_RESTRICT_STRING=$OPTARG;; |
| |
| S) case $OPTARG in |
| /*) |
| SKIPFILE=$OPTARG;; |
| *) |
| SKIPFILE="$LTPROOT/$OPTARG";; |
| esac ;; |
| |
| t) # In case you want to specify the time |
| # to run from the command line |
| # (2m = two minutes, 2h = two hours, etc) |
| DURATION="-t $OPTARG" ;; |
| |
| I) # In case you want the testcases to run sequentially RUN_REPEATED times |
| RUN_REPEATED=$OPTARG;; |
| |
| w) CMDFILEADDR=$OPTARG;; |
| |
| x) # number of ltp's to run |
| cat <<-EOF >&1 |
| WARNING: The use of -x can cause unpredictable failures, as a |
| result of concurrently running multiple tests designed |
| to be run exclusively. |
| Pausing for 10 seconds... |
| EOF |
| sleep 10 |
| INSTANCES="-x $OPTARG";; |
| b) DEVICE=$OPTARG;; |
| B) LTP_DEV_FS_TYPE=$OPTARG;; |
| z) BIG_DEVICE=$OPTARG;; |
| Z) BIG_DEVICE_FS_TYPE=$OPTARG;; |
| \?) usage;; |
| esac |
| done |
| |
| ## Create a default log file even if the user has not asked for one |
| if [ ! "$LOGFILE" ]; then ## User has not specified a log file name |
| LOGFILE_NAME="$DEFAULT_FILE_NAME_GENERATION_TIME" |
| LOGFILE="-l $LTPROOT/results/LTP_RUN_ON-$LOGFILE_NAME.log" |
| ALT_DIR_RES=1 |
| PRETTY_PRT="-p" |
| fi |
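| # For illustration (hypothetical timestamp): with 2009_08_17-10h_30m_00s the |
| # default above becomes "$LTPROOT/results/LTP_RUN_ON-2009_08_17-10h_30m_00s.log", |
| # and the blocks below derive matching .failed/.tconf names under $LTPROOT/output. |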
| |
| ## Always create a Failed File as well (gives the user a better idea of which tests failed) |
| |
| if [ ! "$FAILCMDFILE" ]; then ## User has not mentioned about Failed File name |
| ALT_DIR_OUT=1 |
| if [ ! "$OUTPUTFILE" ]; then ## User has not mentioned about Output File name either |
| if [ ! "$LOGFILE" ]; then ## User has not mentioned about Log File name either |
| FAILED_FILE_NAME="$DEFAULT_FILE_NAME_GENERATION_TIME" |
| FAILCMDFILE="-C $LTPROOT/output/LTP_RUN_ON-$FAILED_FILE_NAME.failed" |
| else ## The user did specify a log file, |
| FAILED_FILE_NAME=`basename $LOGFILE_NAME` ## Extract log file name and use it to construct Failed file name |
| FAILCMDFILE="-C $LTPROOT/output/LTP_RUN_ON-$FAILED_FILE_NAME.failed" |
| fi |
| else ## The user did specify an Output file |
| FAILED_FILE_NAME=`basename $OUTPUTFILE_NAME` ## Extract output file name and use it to construct Failed file name |
| FAILCMDFILE="-C $LTPROOT/output/LTP_RUN_ON-$FAILED_FILE_NAME.failed" |
| fi |
| fi |
| |
| if [ ! "$TCONFCMDFILE" ]; then |
| ALT_DIR_OUT=1 |
| if [ ! "$OUTPUTFILE" ]; then |
| if [ ! "$LOGFILE" ]; then |
| TCONF_FILE_NAME="$DEFAULT_FILE_NAME_GENERATION_TIME" |
| TCONFCMDFILE="-T $LTPROOT/output/LTP_RUN_ON-${TCONF_FILE_NAME}.tconf" |
| else |
| TCONF_FILE_NAME=`basename $LOGFILE_NAME` |
| TCONFCMDFILE="-T $LTPROOT/output/LTP_RUN_ON-${TCONF_FILE_NAME}.tconf" |
| fi |
| else |
| TCONF_FILE_NAME=`basename $OUTPUTFILE_NAME` |
| TCONFCMDFILE="-T $LTPROOT/output/LTP_RUN_ON-${TCONF_FILE_NAME}.tconf" |
| fi |
| fi |
| |
| if [ "$ALT_HTML_OUT" -eq 1 ] ; then ## User wants the HTML version of the output |
| QUIET_MODE="" ## Suppressing this guy as it will prevent generation of proper output |
| ## which the HTML parser will require |
| if [ ! "$OUTPUTFILE" ]; then ## User has not mentioned about the Outputfile name, then we need to definitely generate one |
| OUTPUTFILE_NAME="$DEFAULT_FILE_NAME_GENERATION_TIME" |
| OUTPUTFILE="-o $LTPROOT/output/LTP_RUN_ON-$OUTPUTFILE_NAME.output" |
| ALT_DIR_OUT=1 |
| if [ ! "$HTMLFILE" ] ; then ## User has not mentioned HTML File name, We need to create one |
| HTMLFILE_NAME=`basename $OUTPUTFILE_NAME` |
| HTMLFILE="$LTPROOT/output/$HTMLFILE_NAME.html" |
| fi |
| fi |
| fi |
| |
| # Create the output directory if needed |
| [ "$ALT_DIR_OUT" -eq 1 ] && \ |
| { |
| [ ! -d $LTPROOT/output ] && \ |
| { |
| echo "INFO: creating $LTPROOT/output directory" |
| mkdir -p $LTPROOT/output || \ |
| { |
| echo "ERROR: failed to create $LTPROOT/output" |
| exit 1 |
| } |
| } |
| } |
| # Create the results directory if needed |
| [ "$ALT_DIR_RES" -eq 1 ] && \ |
| { |
| echo "INFO: creating $LTPROOT/results directory" |
| [ ! -d $LTPROOT/results ] && \ |
| { |
| mkdir -p $LTPROOT/results || \ |
| { |
| echo "ERROR: failed to create $LTPROOT/results" |
| exit 1 |
| } |
| } |
| } |
| |
| # Added -m 777 for tests that call tst_tmpdir() and try to |
| # write to it as user nobody |
| mkdir -m 777 -p $TMPBASE || \ |
| { |
| echo "FATAL: Unable to make temporary directory $TMPBASE" |
| exit 1 |
| } |
| # use mktemp to create "safe" temporary directories |
| export TMPTEMPLATE="${TMPBASE}/ltp-XXXXXXXXXX" |
| TMP=`mktemp -d $TMPTEMPLATE` || \ |
| { |
| echo "FATAL: Unable to make temporary directory: $TMP" |
| exit 1 |
| } |
| export TMP |
| # To be invoked by tst_tmpdir() |
| # write to it as user nobody |
| export TMPDIR=$TMP |
| |
| trap "cleanup" 0 |
| |
| chmod 777 $TMP || \ |
| { |
| echo "unable to chmod 777 $TMP ... aborting" |
| exit 1 |
| } |
| |
| cd $TMP || \ |
| { |
| echo "could not cd ${TMP} ... exiting" |
| exit 1 |
| } |
| |
| [ -n "$INSTANCES" ] && \ |
| { |
| INSTANCES="$INSTANCES -O ${TMP}" |
| } |
| |
| # If user does not provide a command file select a default set of testcases |
| # to execute. |
| if [ -z "$CMDFILES" ] && [ -z "$CMDFILEADDR" ]; then |
| |
| SCENARIO_LISTS="$LTPROOT/scenario_groups/default" |
| if [ "$RUN_NETEST" -eq 1 ]; then |
| SCENARIO_LISTS="$LTPROOT/scenario_groups/network" |
| fi |
| |
| cat <<-EOF >&1 |
| INFO: no command files were provided. Executing the following runtest scenario files: |
| `cat $SCENARIO_LISTS | tr '\012' ' '` |
| |
| EOF |
| cat_ok_sentinel=$TMP/cat_ok.$$ |
| touch "$cat_ok_sentinel" |
| cat $SCENARIO_LISTS | while read scenfile; do |
| scenfile=${LTPROOT}/runtest/$scenfile |
| [ -f "$scenfile" ] || continue |
| |
| cat $scenfile >> "$TMP/alltests" || { |
| echo "FATAL: unable to append to command file" |
| rm -Rf "$TMP" |
| rm -f "$cat_ok_sentinel" |
| exit 1 |
| } |
| done |
| rm -f "$cat_ok_sentinel" |
| fi |
| |
| [ -n "$CMDFILES" ] && \ |
| { |
| for scenfile in `echo "$CMDFILES" | tr ',' ' '` |
| do |
| [ -f "$scenfile" ] || scenfile="$LTPROOT/runtest/$scenfile" |
| cat "$scenfile" >> ${TMP}/alltests || \ |
| { |
| echo "FATAL: unable to create command file" |
| rm -Rf "$TMP" |
| exit 1 |
| } |
| done |
| } |
| |
| [ -n "$CMDFILEADDR" ] && \ |
| { |
| wget -q "${CMDFILEADDR}" -O ${TMP}/wgetcmdfile |
| if [ $? -ne 0 ]; then |
| echo "FATAL: error while getting the command file with wget (address $CMDFILEADDR)" |
| exit 1 |
| fi |
| cat "${TMP}/wgetcmdfile" >> ${TMP}/alltests || \ |
| { |
| echo "FATAL: unable to create command file" |
| exit 1 |
| } |
| } |
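| # Example (hypothetical URL): "-w http://example.com/mytests" downloads the remote |
| # command file to ${TMP}/wgetcmdfile and appends it to ${TMP}/alltests. |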
| |
| # The fsx-linux tests use the SCRATCHDEV environment variable as a location |
| # that can be reformatted and run on. Set SCRATCHDEV if you want to run |
| # these tests. As a safeguard, this is disabled. |
| unset SCRATCHDEV |
| [ -n "$SCRATCHDEV" ] && \ |
| { |
| cat ${LTPROOT}/runtest/fsx >> ${TMP}/alltests || |
| { |
| echo "FATAL: unable to create fsx-linux tests command file" |
| exit 1 |
| } |
| } |
| |
| # If enabled, execute only test cases that match the PATTERN |
| if [ -n "$TAG_RESTRICT_STRING" ] |
| then |
| mv -f ${TMP}/alltests ${TMP}/alltests.orig |
| grep $TAG_RESTRICT_STRING ${TMP}/alltests.orig > ${TMP}/alltests #Not worth checking return codes for this case |
| fi |
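| # Example (hypothetical pattern): with "-s msgctl" only the lines of ${TMP}/alltests |
| # matching "msgctl" survive the grep above and will be run. |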
| |
| # Blacklist or skip tests if a SKIPFILE was specified with -S |
| if [ -n "${SKIPFILE}" ]; then |
| for test_name in $(awk '{print $1}' "${SKIPFILE}"); do |
| case "${test_name}" in \#*) continue;; esac |
| sed -i "/\<${test_name}\>/c\\${test_name} exit 32;" alltests |
| done |
| fi |
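| # Illustration (hypothetical skip file): a SKIPFILE line reading "msgctl01" rewrites |
| # the matching runtest entry to "msgctl01 exit 32;", so ltp-pan records exit code 32 |
| # for it instead of executing the real test. |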
| |
| # check for required users and groups |
| ${LTPROOT}/IDcheck.sh || \ |
| { |
| echo "WARNING: required users and groups not present" |
| echo "WARNING: some test cases may fail" |
| } |
| |
| # display versions of installed software |
| [ -z "$QUIET_MODE" ] && \ |
| { |
| ${LTPROOT}/ver_linux || \ |
| { |
| echo "WARNING: unable to display versions of software installed" |
| exit 1 |
| } |
| } |
| |
| set_block_device |
| |
| # If the user does not specify a big block device on the command line, |
| # we do not create one automatically. |
| if [ -z "$BIG_DEVICE" ]; then |
| echo "no big block device was specified on commandline." |
| echo "Tests which require a big block device are disabled." |
| echo "You can specify it with option -z" |
| else |
| export LTP_BIG_DEV=$BIG_DEVICE |
| if [ -z "$BIG_DEVICE_FS_TYPE" ]; then |
| export LTP_BIG_DEV_FS_TYPE="ext2" |
| else |
| export LTP_BIG_DEV_FS_TYPE=$BIG_DEVICE_FS_TYPE |
| fi |
| fi |
| |
| if [ $RUN_REPEATED -gt 1 ]; then # More than one sequential run must be requested, otherwise the default single pass runs |
| echo "PAN will run these test cases $RUN_REPEATED times....." |
| echo "Test Tags will be Prepended with ITERATION NO.s....." |
| inc=1 |
| sed -e '/^$/ d' -e 's/^[ ,\t]*//' -e '/^#/ d' < ${TMP}/alltests > ${TMP}/alltests.temp ## Remove empty lines, leading spaces/commas/tabs and comment lines |
| sed 's/^[0-9,a-z,A-Z]*/'"$inc"'_ITERATION_&/' < ${TMP}/alltests.temp > ${TMP}/alltests ## .temp is kept as Base file |
| while [ $inc -lt $RUN_REPEATED ] ; do |
| inc=`expr $inc + 1` |
| sed 's/^[0-9,a-z,A-Z]*/'"$inc"'_ITERATION_&/' < ${TMP}/alltests.temp >> ${TMP}/alltests #Keep appending with Iteration No.s |
| done |
| fi |
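| # Illustration (hypothetical entry): with "-I 2" a runtest line such as |
| # "abort01 abort01" is emitted as "1_ITERATION_abort01 abort01" and |
| # "2_ITERATION_abort01 abort01", giving each iteration a distinct tag. |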
| |
| if [ "$RANDOMRUN" != "0" ]; then |
| sort -R ${TMP}/alltests -o ${TMP}/alltests |
| fi |
| |
| [ ! -z "$QUIET_MODE" ] && { echo "INFO: Test start time: $(date)" ; } |
| PAN_COMMAND="${LTPROOT}/bin/ltp-pan $QUIET_MODE $NO_KMSG -e -S $INSTANCES $DURATION -a $$ \ |
| -n $$ $PRETTY_PRT -f ${TMP}/alltests $LOGFILE $OUTPUTFILE $FAILCMDFILE $TCONFCMDFILE" |
| echo "COMMAND: $PAN_COMMAND" |
| if [ ! -z "$TAG_RESTRICT_STRING" ] ; then |
| echo "INFO: Restricted to $TAG_RESTRICT_STRING" |
| fi |
| #$PAN_COMMAND #Duplicated code here, because otherwise if we fail, only "PAN_COMMAND" gets output |
| |
| ## Display the Output/Log/Failed/HTML file names here |
| printf "LOG File: " |
| echo $LOGFILE | cut -b4- |
| |
| if [ "$OUTPUTFILE" ]; then |
| printf "OUTPUT File: " |
| echo $OUTPUTFILE | cut -b4- |
| fi |
| |
| printf "FAILED COMMAND File: " |
| echo $FAILCMDFILE | cut -b4- |
| |
| printf "TCONF COMMAND File: " |
| echo $TCONFCMDFILE | cut -b4- |
| |
| if [ "$HTMLFILE" ]; then |
| echo "HTML File: $HTMLFILE" |
| fi |
| |
| echo "Running tests......." |
| test_start_time=$(date) |
| |
| # User wants testing with Kernel Fault Injection |
| if [ $INJECT_KERNEL_FAULT ] ; then |
| # See if debugfs is mounted and at least one fault injection |
| # framework is available through it |
| use_faultinjection=false |
| for debug_subdir in \ |
| fail_io_timeout \ |
| fail_make_request \ |
| fail_page_alloc \ |
| failslab \ |
| ; do |
| if [ -d "/sys/kernel/debug/$debug_subdir" ] |
| then |
| use_faultinjection=true |
| break |
| fi |
| done |
| if $use_faultinjection; then |
| # If at least one of the frameworks is available, |
| #Go ahead to Inject Fault & Create required |
| #Command Files for LTP run |
| echo Running tests with Fault Injection Enabled in the Kernel... |
| awk -v LOOPS=$INJECT_FAULT_LOOPS_PER_TEST \ |
| -v PERCENTAGE=$INJECT_KERNEL_FAULT_PERCENTAGE \ |
| -f ${LTPROOT}/bin/create_kernel_faults_in_loops_and_probability.awk \ |
| ${TMP}/alltests > ${TMP}/alltests.tmp |
| cp ${TMP}/alltests.tmp ${TMP}/alltests |
| rm -rf ${TMP}/alltests.tmp |
| else |
| echo Fault Injection not enabled in the Kernel.. |
| echo Running tests normally... |
| fi |
| fi |
| |
| ## Valgrind Check will work only when Kernel Fault Injection is not expected, |
| ## We do not want to test Faults when valgrind is running |
| if [ $VALGRIND_CHECK ]; then |
| if [ ! $INJECT_KERNEL_FAULT ]; then |
| which valgrind || VALGRIND_CHECK_TYPE=XYZ |
| case $VALGRIND_CHECK_TYPE in |
| [1-3]) |
| awk -v CHECK_LEVEL=$VALGRIND_CHECK_TYPE \ |
| -f ${LTPROOT}/bin/create_valgrind_check.awk \ |
| ${TMP}/alltests $VALGRIND_CHECK_TYPE > \ |
| ${TMP}/alltests.tmp |
| cp ${TMP}/alltests.tmp ${TMP}/alltests |
| rm -rf ${TMP}/alltests.tmp |
| ;; |
| *) |
| echo "Invalid Memory Check Type, or, Valgrind is not available" |
| ;; |
| esac |
| fi |
| fi |
| |
| if [ $ALT_DMESG_OUT -eq 1 ] ; then |
| # We want to capture dmesg output for each test; the awk script below does the trick |
| echo Enabling dmesg output logging for each test... |
| awk -v DMESG_DIR=$DMESG_DIR \ |
| -f ${LTPROOT}/bin/create_dmesg_entries_for_each_test.awk \ |
| ${TMP}/alltests > ${TMP}/alltests.tmp |
| cp ${TMP}/alltests.tmp ${TMP}/alltests |
| rm -rf ${TMP}/alltests.tmp |
| fi |
| # Some tests need to run inside the "bin" directory. |
| cd "${LTPROOT}/testcases/bin" |
| "${LTPROOT}/bin/ltp-pan" $QUIET_MODE $NO_KMSG -e -S $INSTANCES $DURATION -a $$ -n $$ $PRETTY_PRT -f ${TMP}/alltests $LOGFILE $OUTPUTFILE $FAILCMDFILE $TCONFCMDFILE |
| |
| if [ $? -eq 0 ]; then |
| echo "INFO: ltp-pan reported all tests PASS" |
| VALUE=0 |
| export LTP_EXIT_VALUE=0; |
| else |
| echo "INFO: ltp-pan reported some tests FAIL" |
| VALUE=1 |
| export LTP_EXIT_VALUE=1; |
| fi |
| cd .. |
| echo "LTP Version: $version_date" |
| |
| # $DMESG_DIR is used to cache messages obtained from dmesg after a test run. |
| # Proactively reap all of the 0-byte files in $DMESG_DIR as they have zero value |
| # and only clutter up the filesystem. |
| |
| if [ $ALT_DMESG_OUT -eq 1 ] ; then |
| if ! find "$DMESG_DIR" -size 0 -exec rm {} + ; then |
| echo "cd to $DMESG_DIR failed: $?" |
| fi |
| if [ -n "$(ls "$DMESG_DIR")" ] ; then |
| echo "Kernel messages were generated for LTP tests $version_date" |
| else |
| echo "No Kernel messages were generated for LTP tests $version_date" |
| fi |
| fi |
| |
| if [ "$ALT_HTML_OUT" -eq 1 ] ; then #User wants the HTML output to be created, it then needs to be generated |
| export LTP_VERSION=$version_date |
| export TEST_START_TIME="$test_start_time" |
| export TEST_END_TIME="$(date)" |
| OUTPUT_DIRECTORY=`echo $OUTPUTFILE | cut -c4-` |
| LOGS_DIRECTORY="$LTPROOT/results" |
| export TEST_OUTPUT_DIRECTORY="$LTPROOT/output" |
| export TEST_LOGS_DIRECTORY=$LOGS_DIRECTORY |
| echo "Generating HTML Output.....!!" |
| ( perl $LTPROOT/bin/genhtml.pl $LTPROOT/bin/html_report_header.txt test_start test_end test_output execution_status $OUTPUT_DIRECTORY > $HTMLFILE; ) |
| echo "Generated HTML Output.....!!" |
| echo "Location: $HTMLFILE"; |
| |
| fi |
| |
| if [ "$ALT_EMAIL_OUT" -eq 1 ] ; then ## User wants reports to be e-mailed |
| TAR_FILE_NAME=LTP_RUN_$version_date$DEFAULT_FILE_NAME_GENERATION_TIME.tar |
| if [ "$HTMLFILE_NAME" ] ; then ## HTML file Exists |
| if [ "$ALT_HTML_OUT" -ne 1 ] ; then ## The HTML file path is absolute and not $LTPROOT/output |
| mkdir -p $LTPROOT/output ## We need to create this Directory |
| cp $HTMLFILE_NAME $LTPROOT/output/ |
| fi |
| fi |
| if [ "$OUTPUTFILE_NAME" ] ; then ## Output file exists |
| if [ "$ALT_DIR_OUT" -ne 1 ] ; then ## The Output file path is absolute and not $LTPROOT/output |
| mkdir -p $LTPROOT/output ## We need to create this Directory |
| cp $OUTPUTFILE_NAME $LTPROOT/output/ |
| fi |
| fi |
| if [ "$LOGFILE_NAME" ] ; then ## Log file exists |
| if [ "$ALT_DIR_RES" -ne 1 ] ; then ## The Log file path is absolute and not $LTPROOT/results |
| mkdir -p $LTPROOT/results ## We need to create this Directory |
| cp $LOGFILE_NAME $LTPROOT/results/ |
| fi |
| fi |
| if [ -d $LTPROOT/output ] ; then |
| tar -cf ./$TAR_FILE_NAME $LTPROOT/output |
| if [ $? -eq 0 ]; then |
| echo "Created TAR File: ./$TAR_FILE_NAME successfully, added $LTPROOT/output" |
| else |
| echo "Cannot Create TAR File: ./$TAR_FILE_NAME for adding $LTPROOT/output" |
| fi |
| fi |
| if [ -d $LTPROOT/results ] ; then |
| tar -uf ./$TAR_FILE_NAME $LTPROOT/results |
| if [ $? -eq 0 ]; then |
| echo "Updated TAR File: ./$TAR_FILE_NAME successfully, added $LTPROOT/results" |
| else |
| echo "Cannot Update TAR File: ./$TAR_FILE_NAME for adding $LTPROOT/results" |
| fi |
| fi |
| if [ -e $LTPROOT/nohup.out ] ; then ## If User would have Chosen nohup to do ltprun |
| tar -uf ./$TAR_FILE_NAME $LTPROOT/nohup.out |
| if [ $? -eq 0 ]; then |
| echo "Updated TAR File: ./$TAR_FILE_NAME successfully, added $LTPROOT/nohup.out" |
| else |
| echo "Cannot Update TAR File: ./$TAR_FILE_NAME for adding $LTPROOT/nohup.out" |
| fi |
| fi |
| gzip ./$TAR_FILE_NAME ## gzip this guy |
| if [ $? -eq 0 ]; then |
| echo "Gunzipped TAR File: ./$TAR_FILE_NAME" |
| else |
| echo "Cannot Gunzip TAR File: ./$TAR_FILE_NAME" |
| fi |
| if which mutt >/dev/null 2>&1; then |
| echo "Starting mailing reports to: $EMAIL_TO, file: ./$TAR_FILE_NAME.gz" |
| mutt -a ./$TAR_FILE_NAME.gz -s "LTP Reports on $test_start_time" -- $EMAIL_TO < /dev/null |
| if [ $? -eq 0 ]; then |
| echo "Reports Successfully mailed to: $EMAIL_TO" |
| else |
| echo "Reports cannot be mailed to: $EMAIL_TO" |
| fi |
| else ## Fall back to the age-old mail program |
| echo "Starting mailing reports to: $EMAIL_TO, file: ./$TAR_FILE_NAME.gz" |
| uuencode ./$TAR_FILE_NAME.gz $TAR_FILE_NAME.gz | mail $EMAIL_TO -s "LTP Reports on $test_start_time" |
| if [ $? -eq 0 ]; then |
| echo "Reports Successfully mailed to: $EMAIL_TO" |
| else |
| echo "Reports cannot be mailed to: $EMAIL_TO" |
| fi |
| fi |
| fi |
| |
| [ ! -z "$QUIET_MODE" ] && { echo "INFO: Test end time: $(date)" ; } |
| |
| [ "$GENLOAD" -eq 1 ] && { killall -9 genload >/dev/null 2>&1; } |
| [ "$NETPIPE" -eq 1 ] && { killall -9 NPtcp >/dev/null 2>&1; } |
| |
| [ "$ALT_DIR_OUT" -eq 1 ] || [ "$ALT_DIR_RES" -eq 1 ] && \ |
| { |
| cat <<-EOF >&1 |
| |
| ############################################################### |
| |
| Done executing testcases. |
| LTP Version: $version_date |
| ############################################################### |
| |
| EOF |
| } |
| exit $VALUE |
| } |
| |
| set_block_device() |
| { |
| if [ -n "$DEVICE" ]; then |
| export LTP_DEV=$DEVICE |
| fi |
| } |
| |
| cleanup() |
| { |
| [ "$LOOP_DEV" ] && losetup -d $LOOP_DEV |
| rm -rf ${TMP} |
| } |
| |
| |
| LTP_SCRIPT="$(basename $0)" |
| |
| if [ "$LTP_SCRIPT" = "runltp" ]; then |
| setup |
| main "$@" |
| fi |