Posted to commits@qpid.apache.org by ri...@apache.org on 2009/06/22 18:00:02 UTC

svn commit: r787292 - in /qpid/trunk/qpid/java/perftests/bin: monitoring/ monitoring/monitor-broker.sh monitoring/runTests.sh monitoring/stop-monitored-broker.sh processing/ processing/process.sh processing/processAll.sh processing/processTests.py

Author: ritchiem
Date: Mon Jun 22 16:00:02 2009
New Revision: 787292

URL: http://svn.apache.org/viewvc?rev=787292&view=rev
Log:
QPID-1934 : Add start/stop scripts for broker monitoring.
Added runTests.sh, which starts a given set of tests and then generates stat files for them.
Updated monitor-broker.sh to use $! to get the top PID, rather than ps|grep (sketched below).
Made runTests.sh take a file which contains the list of tests to run.
Added processTests.py:
   Creates a Result Package suitable for processing by process.sh / processAll.sh.
Updated process.sh to correctly handle the new broker_cpu.log date format and to have better error handling when there is no GC data.
Updated monitor-broker.sh to record the top sampling rate so it can be used in processing.
Updated process.sh to better display the CPU x-axis using time.
Updated processTests.py to use a regex to extract the GC time stamp.
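
For reference, a minimal sketch of the $!-based PID capture mentioned above (the command name and pid file path are hypothetical placeholders, not the committed script):

    #!/bin/bash
    # Start a long-running command in the background and record its PID using $!,
    # which holds the PID of the most recently started background job. This avoids
    # the fragile "ps | grep" approach.
    long-running-command > command.log 2>&1 &
    echo $! > "$LOG_DIR/long-running-command.pid"

    # A stop script can later read the recorded PID and terminate the process:
    kill "$(cat "$LOG_DIR/long-running-command.pid")"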

Added:
    qpid/trunk/qpid/java/perftests/bin/monitoring/
    qpid/trunk/qpid/java/perftests/bin/monitoring/monitor-broker.sh   (with props)
    qpid/trunk/qpid/java/perftests/bin/monitoring/runTests.sh   (with props)
    qpid/trunk/qpid/java/perftests/bin/monitoring/stop-monitored-broker.sh   (with props)
    qpid/trunk/qpid/java/perftests/bin/processing/
    qpid/trunk/qpid/java/perftests/bin/processing/process.sh   (with props)
    qpid/trunk/qpid/java/perftests/bin/processing/processAll.sh   (with props)
    qpid/trunk/qpid/java/perftests/bin/processing/processTests.py   (with props)

Added: qpid/trunk/qpid/java/perftests/bin/monitoring/monitor-broker.sh
URL: http://svn.apache.org/viewvc/qpid/trunk/qpid/java/perftests/bin/monitoring/monitor-broker.sh?rev=787292&view=auto
==============================================================================
--- qpid/trunk/qpid/java/perftests/bin/monitoring/monitor-broker.sh (added)
+++ qpid/trunk/qpid/java/perftests/bin/monitoring/monitor-broker.sh Mon Jun 22 16:00:02 2009
@@ -0,0 +1,200 @@
+#!/bin/bash
+#
+# This script starts a broker and then starts additional logging as required.
+#  *.pid files are generated in the LOG_DIR for later use by the stop-monitored-broker 
+#  script. 
+# 
+# Currently this process starts:
+#   - The broker with additional QPID_OPTS for gc logging
+#   - top, to monitor the CPU usage
+#
+# Additional processes can be started; as long as they write a PID into LOG_DIR/*.pid
+# they will be shut down by the stop script
+#
+
+#
+# Output the broker log file to aid problem diagnosis
+# then exit.
+#
+brokerFailExit()
+{
+   echo "Broker failed to start up."
+   cat $BROKER_LOG
+   exit 1 
+}
+
+showUsageExit()
+{
+  echo "Usage $0 <Path to Test Broker> <LOG DIR> <CPU Monitor Rate (s)> [Additional options to
+  pass to Qpid broker startup]"
+  exit 1
+}
+
+#
+# Perform 3 attempts to get the broker PID via ps and grep
+# if unable, then output the broker log and exit
+#
+getBrokerPID()
+{
+ attempts=3
+ ready=0
+ while [ $ready == 0 ] ; do
+ 
+  PID=`ps auxwww| grep java | grep Xloggc | awk '{print $2}'`
+
+  if [ ! $PID == 0 ] ; then
+   ready=1 
+  else  
+   attempts=$[ $attempts - 1 ]
+ 
+   if [ $attempts == 0 ] ; then
+     brokerFailExit
+   fi
+   
+   sleep 1
+  fi
+ done
+
+}
+
+
+#
+# Additional Check to ensure that the broker process
+# has correctly written 'Ready' to the log file.
+#
+checkBrokerStarted()
+{
+ attempts=3
+ ready=0
+ while [ $ready == 0 ] ; do
+ 
+  grep Ready $BROKER_LOG > /dev/null
+
+  if [ $? == 0 ] ; then
+   ready=1 
+  else  
+   attempts=$[ $attempts - 1 ]
+ 
+   if [ $attempts == 0 ] ; then
+     brokerFailExit
+   fi
+   
+   echo "Broker not ready sleeping 1s"
+   sleep 1
+  fi
+ done
+}
+
+#
+# Command Line setup
+#
+
+# Ensure we have a minimum of three arguments
+if [[ $# > 2 ]] ; then
+  BROKER_VERSION=$1 
+  LOG_DIR=$2
+  CPU_MONITOR_RATE=$3
+  # Remove these arguments from the $@ variable
+  shift 
+  shift 
+  shift
+else
+  # If we have no arguments then use these as the defaults
+  CPU_MONITOR_RATE=0.5
+  LOG_DIR=$QPID_WORK/logging
+  BROKER_VERSION=qpid-0.5
+fi
+
+
+#
+# Check the specified broker is reachable
+# if it is not, then log and show usage
+#
+if [ ! -d $BROKER_VERSION ] ; then
+  echo "Broker not available at: $BROKER_VERSION"
+  showUsageExit
+fi
+
+#
+# Check to see if we have an absolute path for logging
+#
+logStart=`echo $LOG_DIR|cut -c 1`
+
+
+#
+# If we don't have an absolute path then add the current 
+# directory path to the start.
+#
+if [[ $logStart != '/' ]] ; then
+ echo -n "$LOG_DIR is not absolute, using "
+ LOG_DIR=`pwd`/$LOG_DIR
+ echo $LOG_DIR
+fi
+
+#
+# Validate that the directory does not exist
+#  - this is so we can guarantee a clean run.
+# If it does exist, then log and show usage
+#
+if [ -d $LOG_DIR ] ; then
+  echo "Log directory already exists : $LOG_DIR"
+  showUsageExit
+fi
+
+#
+# Create the logging directory
+#
+mkdir -p $LOG_DIR
+
+#
+# Variable for broker log 
+#
+BROKER_LOG=$LOG_DIR/broker.log
+
+# Variable to hold broker PID
+PID=0
+
+export QPID_OPTS="-Xloggc:$LOG_DIR/gc.log -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps" 
+
+#
+# Start Qpid Broker
+#
+echo "Starting Broker : $BROKER_VERSION"
+pushd $BROKER_VERSION/bin > /dev/null
+./qpid-server $@ 2> $BROKER_LOG >&2 &
+popd > /dev/null
+
+# Wait and check startup was ok
+echo "Waiting for broker startup"
+getBrokerPID
+
+checkBrokerStarted
+
+echo $PID > $LOG_DIR/broker.pid
+
+# 
+# Start CPU Monitoring via TOP
+#
+echo "Starting CPU Monitor at RATE:$CPU_MONITOR_RATE on $SERVER1"
+pushd $LOG_DIR > /dev/null
+
+echo $CPU_MONITOR_RATE > top.rate 
+
+top -d $CPU_MONITOR_RATE -S -c -p $PID -b > broker_cpu.log &
+
+#
+# Get top pid using $!
+#
+echo $! > $LOG_DIR/top.pid
+
+popd > /dev/null
+
+
+#
+# Generate Stat files
+#
+echo "Generating Stat data"
+stat $BROKER_LOG > $BROKER_LOG.stat
+stat $LOG_DIR/broker_cpu.log > $LOG_DIR/broker_cpu.log.stat
+stat $LOG_DIR/gc.log > $LOG_DIR/gc.log.stat
+

Propchange: qpid/trunk/qpid/java/perftests/bin/monitoring/monitor-broker.sh
------------------------------------------------------------------------------
    svn:executable = *

Added: qpid/trunk/qpid/java/perftests/bin/monitoring/runTests.sh
URL: http://svn.apache.org/viewvc/qpid/trunk/qpid/java/perftests/bin/monitoring/runTests.sh?rev=787292&view=auto
==============================================================================
--- qpid/trunk/qpid/java/perftests/bin/monitoring/runTests.sh (added)
+++ qpid/trunk/qpid/java/perftests/bin/monitoring/runTests.sh Mon Jun 22 16:00:02 2009
@@ -0,0 +1,127 @@
+#!/bin/bash
+#
+# Run specified performance tests and 
+# gather details about the test run
+#
+
+
+runTest()
+{
+ echo "$@"
+ echo "$@ --csv -o $LOG_DIR" >> $LOG_DIR/TestRun.log 2>&1
+ ./$@ --csv -o $LOG_DIR >> $LOG_DIR/TestRun.log 2>&1
+}
+
+showUsageExit()
+{
+  echo "Usage $0 <Path to Test Pack> <LOG DIR> <TEST LIST FILE>"
+  exit 1
+}
+
+# Ensure we have a minimum of three arguments
+if [[ $# > 2 ]] ; then
+  TEST_VERSION=$1 
+  LOG_DIR=$2
+  TEST_LIST=$3
+  # Remove these arguments from the $@ variable
+  shift 
+  shift
+  shift
+else
+  showUsageExit
+fi
+
+#
+# Check the specified test pack is reachable
+# if it is not, then log and show usage
+#
+if [ ! -d $TEST_VERSION ] ; then
+  echo "Tests not available at: $TEST_VERSION"
+  showUsageExit
+fi
+
+
+#
+# Check to see if we have an absolute path for logging
+#
+logStart=`echo $LOG_DIR|cut -c 1`
+
+#
+# If we don't have an absolute path then add the current 
+# directory path to the start.
+#
+if [[ $logStart != '/' ]] ; then
+ echo -n "$LOG_DIR is not absolute, using "
+ LOG_DIR=`pwd`/$LOG_DIR
+ echo $LOG_DIR
+fi
+
+#
+# Validate that the directory does not exist
+#  - this is so we can guarantee a clean run.
+# If it does exist, then log and show usage
+#
+if [ -d $LOG_DIR ] ; then
+  echo "Log directory already exists : $LOG_DIR"
+  showUsageExit
+fi
+
+#
+# Check to see if we have an absolute path for test list
+#
+testListStart=`echo $TEST_LIST|cut -c 1`
+
+#
+# If we don't have an absolute path then add the current 
+# directory path to the start.
+#
+if [[ $testListStart != '/' ]] ; then
+ echo -n "$TEST_LIST is not absolute, using "
+ TEST_LIST=`pwd`/$TEST_LIST
+ echo $TEST_LIST
+fi
+
+#
+# Validate that the test list file exists and is readable
+#  - without it we cannot drive the test run.
+# If it is not readable, then log and show usage
+#
+# -r Check file exists and is readable
+if [ ! -r $TEST_LIST ] ; then
+  echo "Test file is not readable : $TEST_LIST"
+  showUsageExit
+fi
+
+
+
+#
+# Create the logging directory
+#
+mkdir -p $LOG_DIR
+
+
+
+echo "Starting Test Run in : $TEST_VERSION"
+pushd $TEST_VERSION/bin > /dev/null
+
+#
+# Run tests
+#
+
+
+while read testCommand
+do
+ runTest $testCommand
+done < "$TEST_LIST"
+
+
+popd > /dev/null
+
+
+#
+# Generate Stat files
+#
+echo "Generating Stat data"
+for file in `find $LOG_DIR -name "*.csv"` ; do
+  stat $file > $file.stat
+done

Propchange: qpid/trunk/qpid/java/perftests/bin/monitoring/runTests.sh
------------------------------------------------------------------------------
    svn:executable = *
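
For reference, runTests.sh executes every line of the test list file as a command from within the test pack's bin directory, appending "--csv -o $LOG_DIR" (each line is run verbatim, so the file should contain only commands). A test list might therefore look like the following, where the script names are hypothetical placeholders rather than tests shipped with the perftest pack:

    exampleQueueThroughputTest.sh
    exampleTopicLatencyTest.sh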

Added: qpid/trunk/qpid/java/perftests/bin/monitoring/stop-monitored-broker.sh
URL: http://svn.apache.org/viewvc/qpid/trunk/qpid/java/perftests/bin/monitoring/stop-monitored-broker.sh?rev=787292&view=auto
==============================================================================
--- qpid/trunk/qpid/java/perftests/bin/monitoring/stop-monitored-broker.sh (added)
+++ qpid/trunk/qpid/java/perftests/bin/monitoring/stop-monitored-broker.sh Mon Jun 22 16:00:02 2009
@@ -0,0 +1,103 @@
+#!/bin/bash
+#
+# Script to stop a running monitored broker
+# and the associated monitoring processes.
+#
+# Looks in the specified directory for pid files and 
+# stops those processes
+#
+
+
+usage()
+{
+  echo "Usage: $0 <LOG_DIR>"
+}
+
+#
+# Attempt to gracefully kill processes
+#
+stopRun()
+{
+  kill $PIDS
+}
+
+
+#
+# Forcibly stop processes
+#
+forceStopRun()
+{
+  kill -9 $PIDS
+}
+
+#
+# Show usage if we are not started correctly
+#
+if [ $# != 1 ] ; then
+ usage
+ exit 1
+fi
+
+LOG_DIR=$1
+
+
+PIDS=`cat $LOG_DIR/*.pid`
+
+echo "Preparing to stop:"
+#
+# Escape the path so we can use sed to remove it.
+#
+path=`echo $LOG_DIR|sed -e s/\\\//\\\\\\\\\\\//g`
+
+for i in `ls $LOG_DIR/*.pid` ; do
+  # Remove path from pid item then remove any final '/' 
+  echo $i|cut -d '.' -f 1| sed -e s/$path// |tr '/' ' '
+done
+
+status=`ps $PIDS |wc -l`
+
+if [ $status == 1 ] ; then
+ echo "Processes do not appear to be running."
+ echo "Have they already been stopped?"
+ exit 0
+fi
+
+attempts=3
+
+while [ ! $status == 1 ] ; do
+ stopRun
+ sleep 1
+ status=`ps $PIDS |wc -l`
+ 
+  if [ $status == 1 ] ; then
+    echo "Done"
+    exit 0
+  else  
+   attempts=$[ $attempts - 1 ]
+ 
+   if [ $attempts == 0 ] ; then
+     break
+   fi
+   
+  echo "Sleeping as processes not stopped"
+  sleep 2
+
+  fi
+done
+
+# If we haven't been able to kill the processes then 
+# forcibly do it
+if [ ! $status == 1 ] ; then
+ forceStopRun
+ sleep 1
+ status=`ps $PIDS |wc -l`
+ 
+  if [ $status == 1 ] ; then
+    echo "Done"
+  else
+    echo "Stop failed"
+    exit 1
+  fi 
+else
+ echo "Done"
+fi

Propchange: qpid/trunk/qpid/java/perftests/bin/monitoring/stop-monitored-broker.sh
------------------------------------------------------------------------------
    svn:executable = *

Added: qpid/trunk/qpid/java/perftests/bin/processing/process.sh
URL: http://svn.apache.org/viewvc/qpid/trunk/qpid/java/perftests/bin/processing/process.sh?rev=787292&view=auto
==============================================================================
--- qpid/trunk/qpid/java/perftests/bin/processing/process.sh (added)
+++ qpid/trunk/qpid/java/perftests/bin/processing/process.sh Mon Jun 22 16:00:02 2009
@@ -0,0 +1,264 @@
+#!/bin/bash 
+
+usage()
+{
+  echo "usage: process.sh <Qpid Version> <Test Type Queue/Topic/Latency..> <Volume of data %age> <broker details>"
+  echo "These parameters are used to title and name the graphs:"
+  echo 'Title = $<Qpid Version> $<Test Type> : $<Volume>% volume'
+  echo 'File  = $<Qpid Version>-$<Broker Details>-$<Test Type>-$<Volume>'
+  exit 1
+}
+
+processCMSGCFile()
+{
+ # Extract all the ParNew timings: Ignore any lines corrupted by concurrent CMS logging.
+ grep -v CMS gc.log|grep ParNew > $work/parnew.gc.log
+
+ # Calculate Full GC Data
+ grep failure -B 1 gc.log | sed -e :a -e '$!N;s/\n.(concurrent/ /;ta' -e 'P;D' |grep -v ^\-  > $work/full.gc.log
+ cut -d ':' -f 1 $work/full.gc.log > $work/full.time.log
+ sed -e 's/failure)/\#/g' $work/full.gc.log |cut -d '#' -f 2- |awk '{print $3}' > $work/full.dur.log
+
+ # Move to the working directory to further process the gathered data.
+ pushd $work &> /dev/null
+
+ # Extract the Heap and timing data.
+ #8 is the full gc data
+ #5 is the parnew gc data
+ cat parnew.gc.log | awk '{print $8}' | cut -d '(' -f 2 | cut -d 'K' -f 1 > HEAP_MAX.log
+ cat parnew.gc.log | awk '{print $8}' | cut -d 'K' -f 1 > HEAP_PRE_GC.log
+ cat parnew.gc.log | awk '{print $8}' | cut -d 'K' -f 2 | cut -d '>' -f 2 > HEAP_POST_GC.log
+ cat parnew.gc.log | awk '{print $1}' | cut -d ':' -f 1 > GC_TIME.log
+
+
+ # Calculate ParNew GC Cumulative total 
+ cat parnew.gc.log |awk '{sum=sum+$6; print $6, sum}' > GC_DUR.log
+ # Give a count of GC occurrences
+ parNewGCCount=`wc -l GC_DUR.log| awk '{print $1}'`
+
+ # Create the Heap data file
+ paste GC_TIME.log  HEAP_POST_GC.log  HEAP_PRE_GC.log  HEAP_MAX.log > GC.Heap.data
+ # Create ParNew GC Duration data file for graphing
+ paste GC_TIME.log GC_DUR.log > GC.Dur.data
+ # Create Full GC occurrence log file for plotting
+ paste full.time.log full.dur.log > GC.Full.data
+
+ # Calculate All GC Timing and give a count of their occurrence
+ awk '{print $1}' GC_DUR.log > gc_dur.log
+ paste GC_TIME.log gc_dur.log > gc_all.log
+ cat GC.Full.data >> gc_all.log
+ sort -n gc_all.log | awk '{sum=sum+$2;print $1 , sum}' > GC.Dur.All.data
+ fullGCCount=`wc -l GC.Full.data| awk '{print $1}'`
+}
+
+processG1GCFile()
+{
+ cat gc.log | grep -e \^[0-9]*.[0-9]*\:  -e  \^.*[0-9]*.\-\> > $work/g1.gc.log
+ 
+ # Move to the working directory to further process the gathered data.
+ pushd $work &> /dev/null
+
+ # Calculate Full GC Data
+ cat g1.gc.log | sed -e :a -e '$!N;s/\n.\ *\[/ \[/;ta' -e 'P;D' > full.gc.log
+ grep Full full.gc.log |awk '{print $1}'| tr ':' ' ' > full.time.log 
+ grep Full full.gc.log |awk '{print $5}' > full.dur.log
+ # Create Full GC occurrence log file for plotting
+ paste full.time.log full.dur.log > GC.Full.data
+ 
+ fullGCCount=`wc -l GC.Full.data| awk '{print $1}'`
+
+ # Extract the Heap and timing data.
+ # Create G1 Young Duration data file for graphing
+ grep "(young),"  full.gc.log |awk '{print $1}' | tr ':' ' ' > GC_TIME.log
+ grep "(young),"  full.gc.log |awk '{print $5}' > young.gc.time.log
+ # Calculate G1 young Cumulative total 
+ cat young.gc.time.log |awk '{sum=sum+$1; print $1, sum}' > GC_DUR.log
+ # Create G1 Young Duration data file for graphing
+ paste GC_TIME.log GC_DUR.log > GC.Dur.data
+ 
+ # Give a count of GC occurrences
+ youngGCCount=`wc -l GC_DUR.log| awk '{print $1}'`
+ 
+ #
+ # If we have no GCs then something is wrong
+ if [ $youngGCCount == 0 ] ; then
+   echo "Error no YoungGC log entries to proceses"
+   return
+ fi
+
+ # Gather the Heap Size data
+
+ grep "(young)," full.gc.log | awk '{print $8}' | cut -d '(' -f 2 | cut -d ')' -f 1 > HEAP_MAX.Sized.log
+ grep "(young)," full.gc.log | awk '{print $8}' | cut -d '-' -f 1 > HEAP_PRE_GC.Sized.log
+ grep "(young)," full.gc.log | awk '{print $8}' | cut -d '>' -f 2| cut -d '(' -f 1 > HEAP_POST_GC.Sized.log
+
+ normaliseSizeMemFile HEAP_MAX.Sized.log HEAP_MAX.log
+ normaliseSizeMemFile HEAP_PRE_GC.Sized.log HEAP_PRE_GC.log
+ normaliseSizeMemFile HEAP_POST_GC.Sized.log HEAP_POST_GC.log	
+
+
+ # Create the Heap data file
+ paste GC_TIME.log  HEAP_POST_GC.log  HEAP_PRE_GC.log  HEAP_MAX.log > GC.Heap.data
+
+ # Calculate All GC Timing and give a count of their occurrence
+ awk '{print $1}' GC_DUR.log > gc_dur.log
+ paste GC_TIME.log gc_dur.log > gc_all.log
+ cat GC.Full.data >> gc_all.log
+ sort -n gc_all.log | awk '{sum=sum+$2;print $1 , sum}' > GC.Dur.All.data
+
+}
+
+#
+# Take an input file ($1) of lines
+# <value><K|M>
+# and write an output file ($2) of <value> in whole MB
+#
+normaliseSizeMemFile()
+{
+rm -f $2
+for i in `cat $1` ; do
+ if [[ `echo $i | grep -c "K" ` == 1 ]] ; then
+  kb=`echo $i|cut -d 'K' -f 1`
+  echo $[ $kb / 1024 ] >> $2
+ else
+  echo $i|cut -d 'M' -f 1 >> $2
+ fi
+done
+
+
+}
+
+
+# Parse command line
+# TODO more advanced processing
+# Check we have enough parameters
+if [ $# != 4 ] ; then
+  # Take one arg to be a graph data file.
+  if [ $# == 1 ] ; then
+    textArray[0]="" # hold text
+    length=0
+    # read whole file in loop
+    while read line
+    do
+      textArray[$length]=$line # store line
+      length=$(expr $length + 1) # increase length by 1
+    done < $1
+
+    if [ $length != 2 ] ; then
+      usage
+    fi
+  
+    #Get Title and file name
+    title=${textArray[0]}
+    file=${textArray[1]}
+    
+    pushd `dirname $1`
+    
+  else
+    usage
+  fi
+else  
+  version=$1
+  type=$2
+  volume=$3
+  brokerState=$4
+
+
+  # Configure Graph Title and image file names
+  title="$version $type : $volume% volume"
+  file="$version-$brokerState-$type-$volume"
+fi
+
+work=work
+
+mkdir -p $work
+
+echo -n "Processing GC Usage Data : "
+ParNew=`grep -c ParNew gc.log`
+
+if [ $ParNew != 0 ] ; then 
+ echo "CMS log file"
+ processCMSGCFile 
+ PLOT="\"GC.Dur.data\" with lines axis x1y1 ti \"ParNew GC Time ($parNewGCCount)\", "
+else
+ echo "G1 log file"
+ processG1GCFile
+ PLOT="\"GC.Dur.data\" with lines axis x1y1 ti \"G1 Young Time ($youngGCCount)\", "
+fi
+
+
+
+# Prepare the plot command
+# If a Full GC occurred during this test then plot them
+if [[ $fullGCCount > 0 ]] ; then
+PLOT=$PLOT"\"GC.Dur.data\" using 1:3 with lines axis x1y2 ti \"Cumulative Total Time(ParNew)\", \
+     \"GC.Dur.All.data\" with lines axis x1y2 ti \"Cumulative Total Time(All)\", \
+     \"GC.Full.data\" with points ti \"Full GCs Time ($fullGCCount)\" "
+else
+PLOT=$PLOT"\"GC.Dur.data\" using 1:3 with lines axis x1y2 ti \"Cumulative Total Time(ParNew)\", \
+     \"GC.Dur.All.data\" with lines axis x1y2 ti \"Cumulative Total Time(All)\" "
+fi
+
+if [ $ParNew != 0 ] ; then 
+  gcs=$parNewGCCount
+else
+  gcs=$youngGCCount 
+fi
+
+# Call out to gnuplot to generate graphs
+# Only do this if we have data
+if [ $gcs != 0 ] ; then
+  # Generate the Heap Graph and the GC Duration Graph
+  gnuplot << EOGNUPLOT
+set xlabel "Run Time(s)"
+
+set title "$title : Heap Size"
+set term png
+set output "$file-Heap.png"
+set ylabel "MB" +0.5,0
+plot "GC.Heap.data" using 1:2 with lines axis x1y1 ti "GC Size Post",\
+     "GC.Heap.data" using 1:4 with lines axis x1y1 ti "GC Size Max ", \
+     "GC.Heap.data" using 1:3 with lines axis x1y1 ti "GC Size Pre "   
+
+set y2tics nomirror
+set ytics nomirror
+set key top left
+set title "$title GC Time"
+set ylabel "Time(s)" +0.5,0
+set y2label "Total Time(s)"
+set output "$file-GCDuration.png"
+plot $PLOT
+EOGNUPLOT
+
+else
+  echo "Warning: No GC Data to graph"
+fi
+
+# pop back to further process for CPU usage
+popd &> /dev/null
+
+echo "Processing CPU Usage Data"
+
+# CPU.data is just TimeStamp + %age
+cat broker_cpu.log |awk '{print $1 "T" $2 " " $3}' > $work/CPU.data
+
+# Move to work directory to run gnuplot
+pushd $work &> /dev/null
+
+# Call out to gnuplot to generate graphs
+# Generate the Heap Graph and the GC Duration Graph
+gnuplot << EOGNUPLOT
+set term png
+set title "$title : CPU"
+set output "$file-CPU.png"
+unset key
+set xlabel "Real Time(h:m)"
+set ylabel "Usage(%)"
+
+set xdata time	
+set timefmt "%Y-%m-%dT%H:%M:%S"
+set format x "%H:%M"
+plot "CPU.data" using 1:2 with lines 
+EOGNUPLOT
+
+popd &> /dev/null

Propchange: qpid/trunk/qpid/java/perftests/bin/processing/process.sh
------------------------------------------------------------------------------
    svn:executable = *
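
When process.sh is invoked with a single argument, that argument is a graph.data file containing exactly two lines: the graph title and the prefix used for the output image names ($file-Heap.png, $file-GCDuration.png, $file-CPU.png). A hypothetical example, following the format written by processTests.py:

    qpid-0.5:testQueue1:1kb x 2500 msg/sec using AutoAck
    qpid-0.5-testQueue1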

Added: qpid/trunk/qpid/java/perftests/bin/processing/processAll.sh
URL: http://svn.apache.org/viewvc/qpid/trunk/qpid/java/perftests/bin/processing/processAll.sh?rev=787292&view=auto
==============================================================================
--- qpid/trunk/qpid/java/perftests/bin/processing/processAll.sh (added)
+++ qpid/trunk/qpid/java/perftests/bin/processing/processAll.sh Mon Jun 22 16:00:02 2009
@@ -0,0 +1,33 @@
+#!/bin/bash 
+
+usage()
+{
+ echo "processResults.sh <search dir>"
+ exit 1
+}
+
+root=`pwd`
+echo $root
+graphFile=graph.data
+
+if [ $# != 1 ] ; then
+ usage
+fi
+
+mkdir -p results
+
+for file in `find $1 -name $graphFile` ; do
+
+  dir=`dirname $file`
+
+  echo Processing : $dir
+  pushd $dir &> /dev/null
+
+  $root/process.sh $graphFile
+
+  echo Copying Images
+  cp work/*png $root/results/
+  
+  popd &> /dev/null
+  echo Done
+done

Propchange: qpid/trunk/qpid/java/perftests/bin/processing/processAll.sh
------------------------------------------------------------------------------
    svn:executable = *

Added: qpid/trunk/qpid/java/perftests/bin/processing/processTests.py
URL: http://svn.apache.org/viewvc/qpid/trunk/qpid/java/perftests/bin/processing/processTests.py?rev=787292&view=auto
==============================================================================
--- qpid/trunk/qpid/java/perftests/bin/processing/processTests.py (added)
+++ qpid/trunk/qpid/java/perftests/bin/processing/processTests.py Mon Jun 22 16:00:02 2009
@@ -0,0 +1,480 @@
+#!/usr/bin/env python
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+import re
+import datetime
+import sys
+import string
+from optparse import OptionParser
+from datetime import datetime, timedelta
+import shutil
+
+
+def showUsage():
+    log("./pT.py [-b|--broker-log-dir] <dir> [-t|--test-dir] <dir>")
+
+
+ACCESS="Access"
+MODIFY="Modify"
+
+BROKER_LOG="broker.log"
+BROKER_PID="broker.pid"
+BROKER_CPU="broker_cpu.log"
+BROKER_CPU_DATED="broker_cpu.log.dated"
+BROKER_GC="gc.log"
+
+GRAPH_DATA="graph.data"
+
+_verbose = False
+_debug = False
+_brokerLogs = "" 
+
+def exitError(message):
+    log(message)
+    sys.exit(1)
+
+def main():
+    global _log, _verbose, _debug, _brokerLogs
+
+    # Parse the command line options
+    parser = OptionParser()
+
+    parser.add_option("-v", "--verbose", dest="verbose",
+                      action="store_true", default=False, help="enable verbose output")
+
+    parser.add_option("-d", "--debug", dest="debug",
+                      action="store_true", default=False, help="enable debug output")
+
+    parser.add_option("-b", "--broker-log-dir", dest="brokerLogs",
+                      action="store", default="", help="Broker Logs")
+
+    parser.add_option("-t", "--test-dir", dest="testDir",
+                      action="store", default="", help="Test Results")
+
+
+    (options, args) = parser.parse_args()
+    
+    _verbose = options.verbose
+    _debug = options.debug
+    testDir = options.testDir
+    _brokerLogs = options.brokerLogs
+    
+    if testDir == "" or _brokerLogs == "" :
+        log("Broker Log Dir and Test Dir are both requried.")
+    	showUsage()
+
+    if not os.path.exists(testDir):
+        exitError("Test directory does not exist:" + testDir)
+
+    if not os.path.exists(_brokerLogs):
+        exitError("Broker log directory does not exist:" + _brokerLogs)
+
+
+    # Standardize the format of the broker logs    
+    preProcessBrokerLogs(_brokerLogs)
+    
+    # Get list of test results from test_dir
+    processTestResults(testDir)
+
+#
+# Process the log files we know of
+#
+def preProcessBrokerLogs(resultDir):
+    # Pre-Process GC - no pre processing required 
+    
+    # Process Log4j - no processing required as file is already time stamped.
+    
+    # Pre-Process broker_cpu    
+    processCPUUsage(resultDir)
+    
+#
+# Process the broker CPU log file and create an output file of format
+# <Date Time> <CPU Usage>
+#
+#    
+def processCPUUsage(resultDir):
+    logfile=resultDir+os.sep+BROKER_CPU
+    datedFile=resultDir+os.sep+BROKER_CPU_DATED
+    
+    start = extractTime(ACCESS, logfile+".stat")        
+    
+    pid = getPID(BROKER_PID)
+    
+    topRate = getFirstLine(_brokerLogs+os.sep+"top.rate")
+    
+    #
+    # Calculate the time offset to add per top sample line
+    #
+    if topRate.find(".") == -1:
+       seconds = topRate
+       millis = 0
+    else:
+       split = topRate.split('.')
+       seconds = split[0]
+       # Convert the fractional part to milliseconds
+       millis = float("0."+split[1]) * 1000 
+        
+    offset = timedelta(seconds=int(seconds),milliseconds=int(millis))
+
+    #
+    # Process the CPU log file and make a file of format:
+    # datetime <CPU% usage> <MEM% usage>
+    #
+    # Open log CPU file for reading
+    logfile = open(logfile, "r")
+
+    # Open the output file, erasing any existing version
+    output= open(datedFile, "w")
+    for line in logfile:
+        if line.find(pid) != -1:
+	    # Split line on whitespace
+	    data = line.split()
+
+	    #
+	    # Data format
+	    # 0  1     2   3  4    5     6    7    8   9   10 11
+	    # PID USER PR  NI %CPU TIME+ %MEM VIRT RES SHR S  COMMAND    
+	    # 
+	    
+	    #Write out the date time (ISO-8601 format)
+	    output.write(str(start))
+	    # Output the %CPU value
+	    output.write(" "+str(data[4]))
+   	    # Output the %MEM value
+	    output.write(" "+str(data[5]))
+	    output.write('\n')
+
+	    # Add the offset based on the logging rate
+	    start = start + offset
+
+    # Close the files
+    logfile.close()
+    output.close()
+    
+    log("Pre Process of CPU Log file '"+BROKER_CPU+"' complete")
+    
+        
+#
+# Given a known process type, get the recorded PID.
+#	
+def getPID(process):
+    return getFirstLine(_brokerLogs+os.sep+process)
+    
+#
+# Get the first line of the file without EOL chars.
+# NOTE: this will load the entire file into memory to do it.
+#
+def getFirstLine(fileName):  
+    f = open(fileName,"r")
+    line = f.read().splitlines()[0]
+    f.close()
+    return line
+
+
+#
+# Walk the directory given and process all csv test results
+#
+def processTestResults(resultDir):
+    for root, dirs, files in os.walk(resultDir, topdown=False):
+        if len(files) == 0:
+	    exitError("Test result directory is empty:" + resultDir)
+        for file in files:	   
+	    if file.endswith(".csv"):
+	        processTestResult(root , file)
+
+def processTestResult(root, resultFile):    
+    # Open the stat file and extract the test times; we determine:
+    #  - start time based on the 'Access' value 
+    #  - end time based on the 'Modify' value ('Change' would also work)
+    
+    statFile=root+os.sep+resultFile+".stat"
+    
+    if not os.path.exists(statFile):
+        log("Unable to process : Unable to open stat file:" + statFile)
+	return
+
+  
+    createResultSetPackage(root, resultFile)
+    
+    
+def extractTime(field, statFile):
+    stats = open(statFile, "r")
+    for line in stats:
+    	if line.startswith(field):
+	    if line.find("(") == -1:
+		dt = lineToDate(" ".join(line.split()[1:]))
+		
+		#
+		# TODO We need to handle time zone issues as I'm sure we will have issues with the
+		# log4j matching.
+		
+		stats.close
+		return dt		
+		
+#
+# Given a text line in ISO format convert it to a date object
+#				
+def lineToDate(line):
+    #
+    # Need to add checks for when time has no zone
+    #
+    date, time, tz = line.split(' ')
+		
+    year, month, day = date.split('-')
+				
+    hour, minute, s = time.split(':')
+    
+    if s.find('.') == -1:
+        second = s
+	micro = 0
+    else:
+        second, micro = s.split('.')
+				
+    return datetime(int(year),int(month),int(day),int(hour),int(minute),int(second),int(micro))		
+		
+def createResultSetPackage(root, resultFile):
+    
+    # Get the Name of the test to make a directory with said name
+    testName = resultFile.split(".csv")[0]
+    resultDir = root+ os.sep + testName
+    
+    mkdir(resultDir)
+    
+    # Move result file to new directory
+    shutil.move(root + os.sep + resultFile, resultDir)
+    
+    # Move stat file to new directory
+    shutil.move(root + os.sep + resultFile + ".stat",  resultDir)
+     
+    statFile=resultDir + os.sep + resultFile + ".stat"
+    
+    #
+    # Get start and end time for test run
+    #
+    start = extractTime(ACCESS, statFile)
+    end   = extractTime(MODIFY, statFile)
+     
+    sliceBrokerLogs(resultDir, start, end)
+    createGraphData(resultDir, testName)
+    
+    log("Created Result Package for:"+ testName)
+    
+def sliceBrokerLogs(resultDir, start, end):
+    sliceCPULog(resultDir, start, end)
+    sliceLog4j(resultDir, start, end)
+    sliceGCLog(resultDir, start, end)
+    
+
+def sliceCPULog(resultDir, start, end):
+    global _brokerLogs
+    logfile=_brokerLogs+os.sep+BROKER_CPU_DATED
+    cpuSliceFile=resultDir+os.sep+BROKER_CPU
+    
+    # Process the CPU log file and make a file of format:
+    # datetime <CPU% usage> <MEM% usage>
+    #
+    # Open log CPU file for reading
+    log = open(logfile, "r")
+    
+    #
+    # Create outputfile
+    #
+    cpuslice = open(cpuSliceFile,"w")
+    
+    for line in log:
+    	data = line.split()
+	#
+	# //fixme remove tz addition.
+	#
+	lineTime = lineToDate(" ".join(data[0:2])+" +0000")
+	
+	if lineTime > start:
+	    if lineTime < end:
+   	        cpuslice.writelines(line)
+
+    log.close()
+    cpuslice.close()
+
+def sliceGCLog(resultDir, start, end):
+    global _brokerLogs
+    logfile=_brokerLogs+os.sep+BROKER_GC
+    sliceFile=resultDir+os.sep+BROKER_GC
+    
+    gcstart = extractTime(ACCESS, logfile+".stat")        
+    
+    # Open log GC file for reading
+    log = open(logfile, "r")
+
+    # Open the output file, erasing any existing version
+    output= open(sliceFile, "w")
+    
+    # Use a regular expression to pull out the Seconds.Millis values from the
+    # Start of the gc log line.
+    pattern = re.compile(r'(?P<seconds>^[0-9]+)\.(?P<millis>[0-9]+):')
+    
+    for line in log:
+        m = pattern.match(line)
+    
+        if m:
+	    seconds = m.group('seconds');
+	    millis = m.group('millis');
+    		
+	    offset = timedelta(seconds=int(seconds),milliseconds=int(millis))     
+
+            lineTime = gcstart + offset
+	    
+	    if lineTime > start:
+	        if lineTime < end:
+   	            output.writelines(line)
+				
+    # Close the files
+    log.close()
+    output.close()
+
+    
+def sliceLog4j(resultDir, start, end):
+    debug("slice log4j")
+    
+def createGraphData(resultDir, testName):
+    # Create graph.data file for process.sh
+    # Format two lines : Title and filename
+    #   $version $type : $volume% volume
+    #   $version-$brokerState-$type-$volume
+    version=getBrokerVersion()
+    
+    test= extractTestValue("n",resultDir, testName)    
+    volume = int(float(extractTestResult("Test * Size Throughput", resultDir, testName)) * 1000)
+    messageSize = extractTestValue("messageSize",resultDir, testName)
+    ackMode = ackModeToString(extractTestValue("consAckMode",resultDir, testName))
+    
+    graphDataFile=resultDir+os.sep+GRAPH_DATA
+
+    graphData = open(graphDataFile, "w")
+    
+    #
+    # Write Title
+    graphData.write(version+":"+test+":"+str(messageSize)+"kb x "+str(volume)+" msg/sec using "+ackMode)
+    graphData.write('\n')
+    
+    #
+    # Write FileName
+    graphData.writelines(version+"-"+testName)
+    graphData.write('\n')    
+    graphData.close()
+    
+  
+def getBrokerVersion():
+    global _brokerLogs
+    READY = "Qpid Broker Ready"
+    brokerLogFile = _brokerLogs + os.sep + BROKER_LOG
+    
+    log = open(brokerLogFile, "r")
+    
+    dataLine = ""
+    for line in log:
+    	if line.find(READY) != -1:
+	    dataLine = line
+	    break	
+	    
+    # Log Entry
+    #2009-06-19 17:04:02,493 INFO  [main] server.Main (Main.java:456) - Qpid Broker Ready :2.3.0.1 build: 727403M	    
+    # Split on READY
+    data = dataLine.split(READY)
+    
+    # So [1] should be 
+    # :2.3.0.1 build: 727403M	        
+    readyEntries = data[1].split()
+    
+    # so splitting on whitespace should give us ':version'
+    # and a quick split on ':' will give us the version
+    version = readyEntries[0].split(':')[1]
+        
+    # Strip to ensure we have no whitespace
+    return version.strip()
+
+
+def extractTestValue(property,resultDir,testName):
+    return extractTestData(property,resultDir,testName," =")
+    
+def extractTestResult(property,resultDir,testName):  
+    return extractTestData(property,resultDir,testName,":")
+    
+def extractTestData(property,resultDir,testName,type):
+    resultFile = resultDir + os.sep + testName+".csv"
+    
+    results = open(resultFile, "r")
+
+    dataLine = ""
+    for line in results:
+        if line.find("Total Tests:") == 0:
+	    dataLine = line 
+
+    results.close()
+    
+    # Data is CSV
+    data = dataLine.split(',')
+    
+    found = False
+    result = ""
+    searchProperty = property+type
+
+    for entry in data:
+        if found:
+	    result = entry
+	    break
+        if entry.strip() == searchProperty:
+	    found=True
+	
+    return result.strip()    
+    
+
+def ackModeToString(ackMode):
+     if ackMode == '0':
+         return "Transacted"
+     elif ackMode == '1':
+         return "AutoAck"	 
+     elif ackMode == '2':
+         return "ClientAck"
+     elif ackMode == '3':
+         return "DupsOK" 
+     elif ackMode == '257':
+         return "NoAck"
+     elif ackMode == '258':
+         return "PreAck"
+     else:
+     	 return str(ackMode)
+
+    
+
+def debug(msg):
+   global _debug
+   if _debug:
+       log(msg)
+       
+def log(msg):
+   print msg
+    
+def mkdir(dir):
+    if not os.path.exists(dir):
+        os.mkdir(dir)
+    
+if __name__ == "__main__":
+    main()

Propchange: qpid/trunk/qpid/java/perftests/bin/processing/processTests.py
------------------------------------------------------------------------------
    svn:executable = *
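
Taken together, a typical monitored run could be driven as follows. This is only a sketch of the intended workflow; the /path/to/... locations and tests.list file are hypothetical placeholders:

    # Start the broker under monitoring; the log dir collects broker.log, gc.log,
    # broker_cpu.log and the *.pid files used by the stop script.
    monitoring/monitor-broker.sh /path/to/qpid-broker /path/to/broker-logs 0.5

    # Run the tests listed in tests.list, writing CSV results and .stat files.
    monitoring/runTests.sh /path/to/perftest-pack /path/to/results tests.list

    # Stop the broker and the top monitor via the recorded *.pid files.
    monitoring/stop-monitored-broker.sh /path/to/broker-logs

    # Package each CSV result, slice the broker logs to that test's time window
    # and write a graph.data file per test.
    processing/processTests.py -b /path/to/broker-logs -t /path/to/results

    # Generate heap, GC and CPU graphs for every graph.data package found.
    # processAll.sh expects process.sh in the directory it is started from.
    cd processing && ./processAll.sh /path/to/results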


