衔接上一篇内容,以下是analyze.sh脚本内容。
2、analyze.sh脚本
#! /bin/sh
###########################################################
# Copyright (c) 2012, Heng.Wang. All rights reserved.
#
# This program is used to analyze the sysbench test report.
###########################################################
# set -x
# Get the key value of input arguments format like '--args=value'.
# Strip the leading '--name=' prefix from a '--name=value' argument and
# write the bare value to stdout.
get_key_value()
{
    printf '%s\n' "$1" | sed -e 's/^--[a-zA-Z_-]*=//'
}
# Usage will be helpful when you need to input the valid arguments.
# Print the accepted command line options; shown on -? / --help.
usage()
{
    printf '%s\n' \
        "Usage: $0 [configure-options]" \
        '-?, --help Show this help message.' \
        '--inputdir=<> Set the input file path.' \
        '--min-threads=<> Set the min threads number.' \
        '--max-threads=<> Set the max threads number.' \
        '--step=<> Set the thread incremental step.' \
        '--count=<> Set the count of test.' \
        '--outputdir=<> Set the output directory.' \
        'Note: this script is intended for internal use by developers.'
}
# Print the default value of the arguments of the script.
# Dump the current (default) value of every configurable variable.
print_default()
{
    printf '%s\n' \
        'The default value of the variables:' \
        "inputdir $INPUTDIR" \
        "min-threads $MIN_THREADS" \
        "max-threads $MAX_THREADS" \
        "step $STEP" \
        "count $COUNT" \
        "outputdir $OUTPUTDIR"
}
# Parse the input arguments and get the value of the input argument.
# Parse arguments of the form '--name=value' and store each value in the
# matching global variable.  Prints usage and exits 0 for -? / --help;
# exits 1 on any unknown option.
parse_options()
{
    while test $# -gt 0
    do
        case "$1" in
        --inputdir=*)
            INPUTDIR=$(get_key_value "$1");;
        --min-threads=*)
            MIN_THREADS=$(get_key_value "$1");;
        --max-threads=*)
            MAX_THREADS=$(get_key_value "$1");;
        --step=*)
            STEP=$(get_key_value "$1");;
        --count=*)
            COUNT=$(get_key_value "$1");;
        --outputdir=*)
            OUTPUTDIR=$(get_key_value "$1");;
        # '-?' must be escaped: unescaped it is a case glob matching ANY
        # two-character option (e.g. '-x'), which used to hit the help
        # branch instead of the "Unknown option" branch.
        -\? | --help)
            usage
            print_default
            exit 0;;
        *)
            echo "Unknown option '$1'"
            exit 1;;
        esac
        shift
    done
}
#################################################################
# Default values; each may be overridden by a '--name=value' argument.
INPUTDIR=""
MIN_THREADS=100
MAX_THREADS=1000
STEP=100
COUNT=1
OUTPUTDIR=/opt/result
parse_options "$@"
# The input directory is mandatory.  Note: "$INPUTDIR" is quoted so the
# test also works when the value contains spaces, and the exit status is
# 1 ('exit -1' is not portable under /bin/sh).
if [ -z "$INPUTDIR" ]
then
echo "Please give the input file address!"
exit 1
fi
# Create the output directory when it does not exist yet.  '[ ... ]' is
# used instead of the bash-only '[[ ... ]]' because this is a /bin/sh
# script.
[ -d "$OUTPUTDIR" ] || mkdir -p "$OUTPUTDIR"
# Run one analyze sub-script ($1) against one input file ($2) and abort
# the whole run with status 1 if it fails ('exit -1' is not portable).
run_analyze()
{
analyze_script=$1
analyze_input=$2
"./analyze/${analyze_script}" --input="$analyze_input" --outputdir="$OUTPUTDIR"
if [ $? -ne 0 ]
then
echo "Exit with error when run ${analyze_script} procedure!"
exit 1
fi
}
# Report a missing statistics file ($1 is the expected name pattern,
# $iterpath is the directory that was searched) and abort.
missing_file()
{
echo "The sysbench test output data file does not exist!"
echo "Please double check the address $iterpath where the file like '$1'"
exit 1
}
# Average a group of per-iteration result files column-wise into one
# summary file ($1 is the whitespace-separated file list, $2 the output
# file).  $1 is deliberately expanded unquoted so the list word-splits
# into separate arguments for paste.
summarize()
{
paste $1 | awk '{sum=0;for(i=1;i<=NF;i++) sum+=$i; print sum/NF}' > "$2"
}
threads=$MIN_THREADS
# Walk the thread counts from MIN_THREADS to MAX_THREADS in STEP
# increments and analyze the statistics collected for each of them.
while [ "$threads" -le "$MAX_THREADS" ]
do
# When a range of thread counts was tested, the results of each thread
# count live in a subdirectory named after the thread number; with a
# single thread count they live directly in the input directory.
if [ "$MIN_THREADS" -lt "$MAX_THREADS" ]
then
threadpath=${INPUTDIR}/${threads}
else
threadpath=${INPUTDIR}
fi
iter=1
while [ "$iter" -le "$COUNT" ]
do
# When the test was repeated COUNT times, each iteration has its own
# numbered subdirectory.
if [ "$COUNT" -eq 1 ]
then
iterpath=${threadpath}
else
iterpath=${threadpath}/${iter}
fi
# Only run the analyzers when the iteration directory exists.
if [ -d "$iterpath" ]
then
sysbench_file=$(find "$iterpath" -name "sysbench_*.res")
global_file=$(find "$iterpath" -name "global_*.stat")
innodb_file=$(find "$iterpath" -name "innodb_*.stat")
# Sysbench output: report and key performance values.  The quoted
# '-f' test correctly fails when find returned nothing (the old
# unquoted test collapsed to the always-true one-argument form).
if [ -f "$sysbench_file" ]
then
echo "----Analyze the sysbench report----"
run_analyze analyze_sysbench_report.sh "$sysbench_file"
echo "----Analyze the sysbench key value----"
run_analyze analyze_sysbench_performance_args.sh "$sysbench_file"
else
missing_file "sysbench_*.res"
fi
# Global innodb status: pages flushed and dirty page ratio.
if [ -f "$global_file" ]
then
echo "----Analyze the global innodb page flushed----"
run_analyze analyze_global_innodb_page_flushed.sh "$global_file"
echo "----Analyze the global innodb dirty page ratio----"
run_analyze analyze_global_innodb_dirty_page_ratio.sh "$global_file"
else
missing_file "global_*.stat"
fi
# Innodb status: log flushed fallbehind and checkpoint.
if [ -f "$innodb_file" ]
then
echo "----Analyze the innodb log flushed fallbehind----"
run_analyze analyze_innodb_log_flushed_fallbehind.sh "$innodb_file"
echo "----Analyze the innodb checkpoint----"
run_analyze analyze_innodb_checkpoint.sh "$innodb_file"
else
missing_file "innodb_*.stat"
fi
else
# The expected iteration subdirectory is missing.  (The old message
# printed a literal backslash from "don\'t" inside double quotes.)
echo "The input directory doesn't contain the expected subdirectory!"
echo "Please be double check the input file!"
exit 1
fi
iter=$((iter+1))
done
# With repeated iterations, average the per-iteration results for this
# thread count into summary files.
if [ "$COUNT" -ne 1 ]
then
echo "Summarize the test result for thread number is ${threads}"
checkpoint_files=$(find "$OUTPUTDIR" -name "checkpoint_*${threads}*.result")
dirty_pages_files=$(find "$OUTPUTDIR" -name "dirty_*${threads}*.result")
flushed_pages_files=$(find "$OUTPUTDIR" -name "flushed_*${threads}*.result")
log_flushed_files=$(find "$OUTPUTDIR" -name "log_*${threads}*.result")
summarize "$checkpoint_files" "$OUTPUTDIR/summary_checkpoint_${threads}_avg.result"
summarize "$dirty_pages_files" "$OUTPUTDIR/summary_dirty_page_ratio_${threads}_avg.result"
summarize "$flushed_pages_files" "$OUTPUTDIR/summary_flushed_pages_${threads}_avg.result"
summarize "$log_flushed_files" "$OUTPUTDIR/summary_log_flushed_fallbehind_${threads}_avg.result"
fi
threads=$((threads+STEP))
done
echo "The analysis is successfully finished!"
echo "-------------------------------------"
exit 0