Posted to commits@xalan.apache.org by cu...@locus.apache.org on 2000/11/02 00:30:53 UTC

cvs commit: xml-xalan/test/java/src/org/apache/qetest CheckService.java ConsoleLogger.java FileBasedTest.java Logger.java OutputNameManager.java package.html Reporter.java SimpleFileCheckService.java Test.java TestfileInfo.java TestImpl.java XMLFileLogger.java

curcuru     00/11/01 15:30:53

  Added:       test/java/src/org/apache/qetest CheckService.java
                        ConsoleLogger.java FileBasedTest.java Logger.java
                        OutputNameManager.java package.html Reporter.java
                        SimpleFileCheckService.java Test.java
                        TestfileInfo.java TestImpl.java XMLFileLogger.java
  Log:
  Xalan Java-based test automation
  
  Revision  Changes    Path
  1.1                  xml-xalan/test/java/src/org/apache/qetest/CheckService.java
  
  Index: CheckService.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * CheckService.java
   *
   */
  package org.apache.qetest;
  
  /**
   * Interface for 'check'ing (validating) equivalence of two items.
    * Implementers provide their own algorithms for determining
   * equivalence.
   * @author Shane_Curcuru@lotus.com
   * @version $Id: CheckService.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   */
  public interface CheckService
  {
  
      /**
        * Compare two objects for equivalence, and return appropriate result.
        * Implementers should provide the details of their "equals"
        * algorithm in getDescription().
        * Note that the order of actual, reference is usually important
        * in determining the result.
        * <p>Typically:</p>
        * <ul>
        * <li>any unexpected Exceptions thrown -> ERRR_RESULT</li>
        * <li>actual does not exist -> FAIL_RESULT</li>
        * <li>reference does not exist -> AMBG_RESULT</li>
        * <li>actual is equivalent to reference -> PASS_RESULT</li>
        * <li>actual is not equivalent to reference -> FAIL_RESULT</li>
        * </ul>
        *
        * @param reporter to dump any output messages to
        * @param actual (current) Object to check
        * @param reference (gold, or expected) Object to check against
        * @param msg description of what you're checking
        * @return Reporter.*_RESULT code denoting status; each method may define
        * its own meanings for pass, fail, ambiguous, etc.
       */
      public abstract int check(Reporter reporter, Object actual,
                                Object reference, String msg);
  
      /**
        * Description of the algorithm used to check equivalence.
        *
        * @return String describing the "equals" algorithm used.
       */
      public abstract String getDescription();
  
      /**
       * Gets extended information about the last check call.
       *
       * @return String describing any additional info about the last
       * two Objects that were checked
       */
      public abstract String getExtendedInfo();
  }  // end of class CheckService
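   
   For illustration, a minimal CheckService implementation might look like the
   sketch below.  EqualsCheckService is hypothetical (not part of this commit);
   it borrows the Logger.*_RESULT constants defined later in this commit where
   the javadoc above refers to Reporter.*_RESULT codes, and it omits the
   logging through the reporter that a real check service would do.
   
   package org.apache.qetest;
   
   /** Hypothetical sketch: equivalence determined via Object.equals(). */
   public class EqualsCheckService implements CheckService
   {
       public int check(Reporter reporter, Object actual, Object reference,
                        String msg)
       {
           // Per the javadoc above: missing actual -> fail,
           //  missing reference -> ambiguous, otherwise compare
           if (actual == null)
               return Logger.FAIL_RESULT;
   
           if (reference == null)
               return Logger.AMBG_RESULT;
   
           return actual.equals(reference) ? Logger.PASS_RESULT
                                           : Logger.FAIL_RESULT;
       }
   
       public String getDescription()
       {
           return "Compares actual.equals(reference)";
       }
   
       public String getExtendedInfo()
       {
           return "No extended info available";
       }
   }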
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/ConsoleLogger.java
  
  Index: ConsoleLogger.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * ConsoleLogger.java
   *
   */
  package org.apache.qetest;
  
  import java.io.PrintStream;
  
  import java.util.Enumeration;
  import java.util.Hashtable;
  import java.util.Properties;
  
  /**
   * Logger that prints human-readable output to System.out.
   * @author Shane_Curcuru@lotus.com
   * @version $Id: ConsoleLogger.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   */
  public class ConsoleLogger implements Logger
  {
  
      //-----------------------------------------------------
      //-------- Class members --------
      //-----------------------------------------------------
  
      /** Our output stream - currently hard-coded to System.out. */
      protected PrintStream outStream = System.out;
  
      /** If we're ready to start outputting yet. */
      protected boolean ready = false;
  
      /** If we should indent sub-results or not. */
      protected boolean indent = true;
  
       /** Current indentation prefix (two spaces per level) for sub-results. */
      protected StringBuffer sIndent = new StringBuffer();
  
      /** Generic properties for this Logger; sort-of replaces instance variables. */
      protected Properties loggerProps = null;
  
      //-----------------------------------------------------
      //-------- Control and utility routines --------
      //-----------------------------------------------------
  
      /** Simple constructor, does not perform initialization. */
      public ConsoleLogger()
      { /* no-op */
      }
  
      /**
       * Constructor calls initialize(p).
       * @param p Properties block to initialize us with.
       */
      public ConsoleLogger(Properties p)
      {
          ready = initialize(p);
      }
  
      /**
       * Return a description of what this Logger does.
       * @return "reports results to System.out".
       */
      public String getDescription()
      {
          return ("org.apache.qetest.ConsoleLogger - reports results to System.out.");
      }
  
      /**
       * Returns information about the Property name=value pairs that
       * are understood by this Logger/Reporter.
        * @return same as {@link java.applet.Applet#getParameterInfo()}.
       */
      public String[][] getParameterInfo()
      {
  
          String pinfo[][] =
          {
              { OPT_INDENT, "boolean", "If reporter should indent sub-results" }
          };
  
          return pinfo;
      }
  
      /**
       * Accessor methods for our properties block.  
       *
        * @return our Properties block.
       */
      public Properties getProperties()
      {
          return loggerProps;
      }
  
      /**
       * Accessor methods for our properties block.
       * @param p Properties to set (is cloned).
       */
      public void setProperties(Properties p)
      {
  
          if (p != null)
          {
              loggerProps = (Properties) p.clone();
          }
      }
  
      /**
        * Call once to initialize this Logger/Reporter from Properties.
        *
        * @param p Properties block to initialize from.
        * @return true if OK, false if an error occurred.
       */
      public boolean initialize(Properties p)
      {
  
          setProperties(p);
  
          String i = loggerProps.getProperty(OPT_INDENT);
  
          if (i != null)
          {
              if (i.toLowerCase().equals("no")
                      || i.toLowerCase().equals("false"))
                  indent = false;
              else if (i.toLowerCase().equals("yes")
                       || i.toLowerCase().equals("true"))
                  indent = true;
          }
  
          ready = true;
  
          return true;
      }
  
      /**
       * Is this Logger/Reporter ready to log results?
       * @return status - true if it's ready to report, false otherwise
       */
      public boolean isReady()
      {
          return ready;
      }
  
      /**
       * Is this Logger/Reporter still running OK?
       * @return false - ConsoleLoggers never have errors
       */
      public boolean checkError()
      {
          return false;
      }
  
      /**
       * Flush this Logger/Reporter - no-op for ConsoleLogger.
       */
      public void flush()
      { /* no-op */
      }
  
      /**
       * Close this Logger/Reporter - essentially no-op for ConsoleLogger.
       */
      public void close()
      {
  
          flush();
  
          ready = false;
      }
  
      /** Simplistic indenting - two spaces. */
      protected void indent()
      {
          if (indent)
              sIndent.append("  ");
      }
  
      /** Simplistic outdenting - two spaces. */
      protected void outdent()
      {
          if ((indent) && (sIndent.length() >= 2))
              sIndent.setLength(sIndent.length() - 2);
      }
  
      //-----------------------------------------------------
      //-------- Testfile / Testcase start and stop routines --------
      //-----------------------------------------------------
  
      /**
       * Report that a testfile has started.
       * @param name file name or tag specifying the test.
       * @param comment comment about the test.
       */
      public void testFileInit(String name, String comment)
      {
          outStream.println(sIndent + "TestFileInit " + name + ":" + comment);
          indent();
      }
  
      /**
        * Report that a testfile has finished, and report its result.
       * @param msg message or name of test to log out
       * @param result result of testfile
       */
      public void testFileClose(String msg, String result)
      {
          outdent();
          outStream.println(sIndent + "TestFileClose(" + result + ") " + msg);
      }
  
      /**
       * Report that a testcase has started.
       * @param comment short description of this test case's objective.
       */
      public void testCaseInit(String comment)
      {
          outStream.println(sIndent + "TestCaseInit " + comment);
          indent();
      }
  
      /**
        * Report that a testcase has finished, and report its result.
        * @param msg message or name of test case to log out
        * @param result result of testcase
       */
      public void testCaseClose(String msg, String result)
      {
          outdent();
          outStream.println(sIndent + "TestCaseClose(" + result + ") " + msg);
      }
  
      //-----------------------------------------------------
      //-------- Test results logging routines --------
      //-----------------------------------------------------
  
      /**
       * Report a comment to result file with specified severity.
       * ConsoleLoggers ignore message severities.
       * @param level severity or class of message.
       * @param msg comment to log out.
       */
      public void logMsg(int level, String msg)
      {
          outStream.println(sIndent + msg);
      }
  
      /**
        * Report an arbitrary String to result file with specified severity.
        * Log out the String provided exactly as-is.
        * @param level severity or class of message.
        * @param msg arbitrary String to log out.
       */
      public void logArbitrary(int level, String msg)
      {
          outStream.println(msg);
      }
  
      /**
        * Logs out statistics to result file with specified severity.
        * @param level severity of message.
        * @param lVal statistic in long format.
        * @param dVal statistic in double format.
        * @param msg comment to log out.
       */
      public void logStatistic(int level, long lVal, double dVal, String msg)
      {
          outStream.println(sIndent + msg + " l: " + lVal + " d: " + dVal);
      }
  
      /**
        * Logs out an element to results with specified severity.
       * Simply indents and dumps output as string like so:
       * <pre>
       *    element
       *    attr1=value1
       *    ...
       *    msg.toString()
       * </pre>
       * @param level severity of message.
       * @param element name of enclosing element
       * @param attrs hash of name=value attributes
       * @param msg Object to log out; up to reporters to handle
       * processing of this; usually logs just .toString().
       */
      public void logElement(int level, String element, Hashtable attrs,
                             Object msg)
      {
  
          indent();
          outStream.println(sIndent + element);
          indent();
  
          for (Enumeration enum = attrs.keys();
                  enum.hasMoreElements(); /* no increment portion */ )
          {
              Object key = enum.nextElement();
  
              outStream.println(sIndent + key.toString() + "="
                                + attrs.get(key).toString());
          }
  
          outdent();
          outStream.println(sIndent + msg.toString());
          outdent();
      }
  
      /**
       * Logs out contents of a Hashtable with specified severity.
       * @param level severity or class of message.
       * @param hash Hashtable to log the contents of.
        * @param msg description of the Hashtable.
       */
      public void logHashtable(int level, Hashtable hash, String msg)
      {
  
          indent();
          outStream.println(sIndent + "HASHTABLE: " + msg);
          indent();
  
          if (hash == null)
          {
              outStream.println(sIndent + "hash == null, no data");
          }
          else
          {
              try
              {
  
                  // Fake the Properties-like output
                  for (Enumeration enum = hash.keys();
                          enum.hasMoreElements(); /* no increment portion */ )
                  {
                      Object key = enum.nextElement();
  
                      outStream.println(sIndent + key.toString() + "="
                                        + hash.get(key).toString());
                  }
              }
              catch (Exception e)
              {
  
                  // No-op: should ensure we have clean output
              }
          }
  
          outdent();
          outdent();
      }
  
      //-----------------------------------------------------
      //-------- Test results reporting check* routines --------
      //-----------------------------------------------------
  
      /**
       * Writes out a Pass record with comment.
       * @param comment comment to log with the pass record.
       */
      public void checkPass(String comment)
      {
          outStream.println(sIndent + "PASS!  " + comment);
      }
  
      /**
       * Writes out an ambiguous record with comment.
       * @param comment comment to log with the ambg record.
       */
      public void checkAmbiguous(String comment)
      {
          outStream.println(sIndent + "AMBG   " + comment);
      }
  
      /**
       * Writes out a Fail record with comment.
       * @param comment comment to log with the fail record.
       */
      public void checkFail(String comment)
      {
          outStream.println(sIndent + "FAIL   " + comment);
      }
  
      /**
        * Writes out an Error record with comment.
       * @param comment comment to log with the error record.
       */
      public void checkErr(String comment)
      {
          outStream.println(sIndent + "ERROR  " + comment);
      }
  }  // end of class ConsoleLogger
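   
   As an illustrative usage sketch (ConsoleLoggerSample is hypothetical, not
   part of this commit), the logger can be driven directly with the calls shown
   above plus the OPT_INDENT, STATUSMSG and PASS constants from the Logger
   interface later in this commit:
   
   import java.util.Properties;
   
   import org.apache.qetest.ConsoleLogger;
   import org.apache.qetest.Logger;
   
   public class ConsoleLoggerSample
   {
       public static void main(String[] args)
       {
           // "true" or "yes" turns on indenting of sub-results, per initialize()
           Properties p = new Properties();
           p.put(Logger.OPT_INDENT, "true");
   
           ConsoleLogger logger = new ConsoleLogger(p);
   
           logger.testFileInit("SampleTest", "shows the basic Logger calls");
           logger.testCaseInit("a single test case");
           logger.logMsg(Logger.STATUSMSG, "about to check something");
           logger.checkPass("the thing worked");
           logger.testCaseClose("a single test case", Logger.PASS);
           logger.testFileClose("SampleTest", Logger.PASS);
       }
   }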
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/FileBasedTest.java
  
  Index: FileBasedTest.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * FileBasedTest.java
   *
   */
  package org.apache.qetest;
  
  import java.io.File;
  import java.io.FileInputStream;
  import java.io.IOException;
  
  import java.util.Properties;
  import java.util.StringTokenizer;
  import java.util.Vector;
  
  //-------------------------------------------------------------------------
  
  /**
   * Base class for file-based tests.
   * Many tests will need to operate on files external to a product
   * under test.  This class provides useful, generic functionality
   * in these cases.
   * <p>FileBasedTest defines a number of common fields that many
   * tests that operate on data files may use.</p>
   * <ul>These are each pre-initialized for you from the command line or property file.
   * <li>inputDir (string representing dir where input files come from)</li>
   * <li>outputDir (string representing dir where output, working, temp files go)</li>
   * <li>goldDir  (string representing dir where known good reference files are)</li>
   * <li>debug (generic boolean flag for debugging)</li>
   * <li>loggers (FQCN;of;Loggers to add to our Reporter)</li>
   * <li>loggingLevel (passed to Reporters)</li>
   * <li>logFile (string filename for any file-based Reporter)</li>
   * </ul>
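    * <p>For example, a command line might look like the following
    * (the class name and paths here are illustrative only):</p>
    * <pre>java org.apache.qetest.MyTest -load my.properties
    *    -inputDir tests\api -goldDir tests\api-gold
    *    -logFile results\MyTest.xml -loggingLevel 50</pre>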
   * @author Shane_Curcuru@lotus.com
   * @version 3.0
   */
  public class FileBasedTest extends TestImpl
  {
  
      /**
       * Convenience method to print out usage information.
       * @author Shane Curcuru
       * <p>Should be overridden by subclasses, although they are free
       * to call super.usage() to get the common options string.</p>
       *
        * @return String describing the common options.
       */
      public String usage()
      {
  
          return ("Common options supported by FileBasedTest:\n" + "    -"
                  + OPT_LOAD
                  + " <loadPropFile>  (read in a .properties file,\n"
                  + "                           that can set any/all of the other opts)\n"
                  + "    -" + OPT_INPUTDIR + "     <path to input files>\n"
                  + "    -" + OPT_OUTPUTDIR
                  + "    <path to output area - where all output is sent>\n"
                  + "    -" + OPT_GOLDDIR
                  + "      <path to gold reference output>\n" + "    -"
                  + Logger.OPT_LOGFILE
                  + "      <resultsFileName> (sends test results to file)\n"
                  + "    -" + Reporter.OPT_LOGGERS
                  + "      <FQCN;of;Loggers to use >\n" + "    -"
                  + Reporter.OPT_LOGGINGLEVEL + " <int level>\n" + "    -"
                  + Reporter.OPT_DEBUG
                  + "        (prints extra debugging info)\n");
      }
  
      //-----------------------------------------------------
      //-------- Constants for common input params --------
      //-----------------------------------------------------
  
      /**
       * Parameter: Load properties file for options
       * <p>Will load named file as a Properties block, setting any
       * applicable options. Command line takes precedence.
       * Format: <code>-load FileName.prop</code></p>
       */
      public static final String OPT_LOAD = "load";
  
       /** Name of the properties file specified by -load, if any. */
      protected String load = null;
  
      /**
       * Parameter: Where are test input files?
       * <p>Default: .\inputs.
       * Format: <code>-inputDir path\to\dir</code></p>
       */
      public static final String OPT_INPUTDIR = "inputDir";
  
       /** Directory where test input files are found. */
      protected String inputDir = "." + File.separator + "inputs";
  
      /**
       * Parameter: Where should we place output files (or temp files, etc.)?
       * <p>Default: .\outputs.
       * Format: <code>-outputDir path\to\dir</code></p>
       */
      public static final String OPT_OUTPUTDIR = "outputDir";
  
       /** Directory where output, working, and temp files are written. */
      protected String outputDir = "." + File.separator + "outputs";
  
      /**
        * Parameter: Where should we get "gold" pre-validated XML files?
       * <p>Default: .\golds.
       * Format: <code>-goldDir path\to\dir</code></p>
       */
      public static final String OPT_GOLDDIR = "goldDir";
  
       /** Directory where gold reference files are found. */
      protected String goldDir = "." + File.separator + "golds";
  
      /**
       * Parameter: if Reporters should log performance data, true/false.
       */
      protected boolean perfLogging = false;
  
      /**
       * Parameter: general purpose debugging flag.
       */
      protected boolean debug = false;
  
      //-----------------------------------------------------
      //-------- Class members and accessors --------
      //-----------------------------------------------------
  
      /**
        * Total number of test case methods defined in this test.
        * <p>Tests must either set this variable or override runTestCases().</p>
        * <p>Unless you override runTestCases(), test cases must be named like so:</p>
        * <p>&nbsp;&nbsp;testCase<I>N</I>, where <I>N</I> is a consecutively
        * numbered whole integer (1, 2, 3, ...).</p>
       * @see #runTestCases
       */
      public int numTestCases = 0;
  
      /**
       * Generic Properties block for storing initialization info.
       * All startup options get stored in here for later use, both by
       * the test itself and by any Reporters we use.
       */
      protected Properties testProps = new Properties();
  
      /**
       * Accessor method for our Properties block, for use by harnesses.
       *
        * @param p Properties block to copy (cloned; null is ignored).
       */
      public void setProperties(Properties p)
      {
  
          // Don't allow setting to null!
          if (p != null)
          {
              testProps = (Properties) p.clone();
          }
      }
  
      /**
       * Accessor method for our Properties block, for use by harnesses.
       *
        * @return our Properties block.
       */
      public Properties getProperties()
      {
          return testProps;
      }
  
      /**
       * Default constructor - initialize testName, Comment.
       */
      public FileBasedTest()
      {
  
          // Only set them if they're not set
          if (testName == null)
              testName = "FileBasedTest.defaultName";
  
          if (testComment == null)
              testComment = "FileBasedTest.defaultComment";
      }
  
      //-----------------------------------------------------
      //-------- Implement Test/TestImpl methods --------
      //-----------------------------------------------------
  
      /**
       * Initialize this test - called once before running testcases.
       * <p>Use the loggers field to create some loggers in a Reporter.</p>
       * @author Shane_Curcuru@lotus.com
       * @see TestImpl#testFileInit(java.util.Properties)
       *
        * @param p Properties block to initialize from.
        * @return true if OK; false if the test should not continue.
       */
      public boolean preTestFileInit(Properties p)
      {
  
          // Pass our properties block directly to the reporter
          //  so it can use the same values in initialization
          // A Reporter will auto-initialize from the values
          //  in the properties block
          setReporter(new Reporter(p));
          reporter.addDefaultLogger();  // add default logger if needed
          reporter.testFileInit(testName, testComment);
  
          return true;
      }
  
      /**
       * Initialize this test - called once before running testcases.
       * <p>Subclasses <b>must</b> override this to do whatever specific
       * processing they need to initialize their product under test.</p>
       * <p>If for any reason the test should not continue, it <b>must</b>
       * return false from this method.</p>
       * @author Shane_Curcuru@lotus.com
       * @see TestImpl#testFileInit(java.util.Properties)
       *
        * @param p Properties block to initialize from.
        * @return true if OK; false if the test should not continue.
       */
      public boolean doTestFileInit(Properties p)
      {
  
          // @todo implement in your subclass
          reporter.logTraceMsg(
              "FileBasedTest.doTestFileInit() default implementation - please override");
  
          return true;
      }
  
      // Use default implementation of postTestFileInit()
  
      /**
       * Run all of our testcases.
        * <p>Uses Reporter.executeTests().  May be overridden
       * by subclasses to do their own processing.  If you do not override,
       * you must set numTestCases properly!</p>
       * @author Shane Curcuru
       *
        * @param p Properties block to pass along (may currently be unused).
        * @return true if OK.
       */
      public boolean runTestCases(Properties p)
      {
  
          // Properties may be currently unused
          reporter.executeTests(this, numTestCases, p);
  
          return true;
      }
  
      /**
       * Cleanup this test - called once after running testcases.
       * @author Shane Curcuru
       * <p>Tests should override if they need to do any cleanup.</p>
       *
        * @param p Properties block the test was run with.
        * @return true if OK.
       */
      public boolean doTestFileClose(Properties p)
      {
  
          reporter.logTraceMsg(
              "FileBasedTest.doTestFileClose() default implementation - please override");
  
          return true;
      }
  
      // Use default implementations of pre/postTestFileClose()
      //-----------------------------------------------------
      //-------- Initialize our common input params --------
      //-----------------------------------------------------
  
      /**
       * Set our instance variables from a Properties file.
       * <p>Must <b>not</b> use reporter.</p>
       * @author Shane Curcuru
        * @param props Properties block to set name=value pairs from
       * @return status - true if OK, false if error.
       * @todo improve error checking, if needed
       */
      public boolean initializeFromProperties(Properties props)
      {
  
          debugPrintln("FileBasedTest.initializeFromProperties(" + props + ")");
  
          // Parse out any values that match our internal convenience variables
          // default all values to our current values
          // String values are simply getProperty()'d
          inputDir = props.getProperty(OPT_INPUTDIR, inputDir);
  
          if (inputDir != null)
              testProps.put(OPT_INPUTDIR, inputDir);
  
          outputDir = props.getProperty(OPT_OUTPUTDIR, outputDir);
  
          if (outputDir != null)
              testProps.put(OPT_OUTPUTDIR, outputDir);
  
          goldDir = props.getProperty(OPT_GOLDDIR, goldDir);
  
          if (goldDir != null)
              testProps.put(OPT_GOLDDIR, goldDir);
  
          // Use a temp string for those properties we only set 
          //  in our testProps, but don't bother to save ourselves
          String temp = null;
  
          temp = props.getProperty(Reporter.OPT_LOGGERS);
  
          if (temp != null)
              testProps.put(Reporter.OPT_LOGGERS, temp);
  
          temp = props.getProperty(Logger.OPT_LOGFILE);
  
          if (temp != null)
              testProps.put(Logger.OPT_LOGFILE, temp);
  
          // boolean values just check for the non-default value
          String dbg = props.getProperty(Reporter.OPT_DEBUG);
  
          if ((dbg != null) && dbg.equalsIgnoreCase("true"))
          {
              debug = true;
  
              testProps.put(Reporter.OPT_DEBUG, "true");
          }
  
          String pLog = props.getProperty(Reporter.OPT_PERFLOGGING);
  
          if ((pLog != null) && pLog.equalsIgnoreCase("true"))
          {
              perfLogging = true;
  
              testProps.put(Reporter.OPT_PERFLOGGING, "true");
          }
  
          temp = props.getProperty(Reporter.OPT_LOGGINGLEVEL);
  
          if (temp != null)
              testProps.put(Reporter.OPT_LOGGINGLEVEL, temp);
  
          return true;
      }
  
      /**
       * Sets the provided fields with data from an array, presumably
       * from the command line.
       * <p>May be overridden by subclasses, although you should probably
       * read the code to see what default options this handles. Must
       * not use reporter. Calls initializeFromProperties(). After that,
       * sets any internal variables that match items in the array like:
       * <code> -param1 value1 -paramNoValue -param2 value2 </code>
       * Any params that do not match internal variables are simply set
       * into our properties block for later use.  This allows subclasses
       * to simply get their initialization data from the testProps
       * without having to make code changes here.</p>
       * <p>Assumes all params begin with "-" dash, and that all values
       * do <b>not</b> start with a dash.</p>
       * @author Shane Curcuru
        * @param args array of command line arguments
        * @param flag are we being called from a subclass?
       * @return status - true if OK, false if error.
       */
      public boolean initializeFromArray(String[] args, boolean flag)
      {
  
          debugPrintln("FileBasedTest.initializeFromArray(" + args + ")");
  
          // Read in command line args and setup internal variables
          String optPrefix = "-";
          int nArgs = args.length;
  
          // We don't require any arguments: but subclasses might 
          //  want to require certain ones
          // Must read in properties file first, so cmdline can 
          //  override values from properties file
          boolean propsOK = true;
  
          // IF we are being called the first time on this 
          //  array of arguments, go ahead and process unknown ones
          //  otherwise, don't bother
          if (flag)
          {
              for (int k = 0; k < nArgs; k++)
              {
                  if (args[k].equalsIgnoreCase(optPrefix + OPT_LOAD))
                  {
                      if (++k >= nArgs)
                      {
                          System.err.println(
                              "ERROR: must supply properties filename for: "
                              + optPrefix + OPT_LOAD);
  
                          return false;
                      }
  
                      load = args[k];
  
                      try
                      {
  
                          // Load named file into our properties block
                          FileInputStream fIS = new FileInputStream(load);
                          Properties p = new Properties();
  
                          p.load(fIS);
  
                          propsOK &= initializeFromProperties(p);
                      }
                      catch (Exception e)
                      {
                          System.err.println(
                              "ERROR: loading properties file failed: " + load);
                          e.printStackTrace();
  
                          return false;
                      }
  
                      break;
                  }
              }  // end of for(...)
          }  // end of if ((flag))
  
          // Now read in the rest of the command line
          // @todo cleanup loop to be more table-driven
          for (int i = 0; i < nArgs; i++)
          {
  
              // Set any String args and place them in testProps
              if (args[i].equalsIgnoreCase(optPrefix + OPT_INPUTDIR))
              {
                  if (++i >= nArgs)
                  {
                      System.err.println("ERROR: must supply arg for: "
                                         + optPrefix + OPT_INPUTDIR);
  
                      return false;
                  }
  
                  inputDir = args[i];
  
                  testProps.put(OPT_INPUTDIR, inputDir);
  
                  continue;
              }
  
              if (args[i].equalsIgnoreCase(optPrefix + OPT_OUTPUTDIR))
              {
                  if (++i >= nArgs)
                  {
                      System.err.println("ERROR: must supply arg for: "
                                         + optPrefix + OPT_OUTPUTDIR);
  
                      return false;
                  }
  
                  outputDir = args[i];
  
                  testProps.put(OPT_OUTPUTDIR, outputDir);
  
                  continue;
              }
  
              if (args[i].equalsIgnoreCase(optPrefix + OPT_GOLDDIR))
              {
                  if (++i >= nArgs)
                  {
                      System.err.println("ERROR: must supply arg for: "
                                         + optPrefix + OPT_GOLDDIR);
  
                      return false;
                  }
  
                  goldDir = args[i];
  
                  testProps.put(OPT_GOLDDIR, goldDir);
  
                  continue;
              }
  
              if (args[i].equalsIgnoreCase(optPrefix + Reporter.OPT_LOGGERS))
              {
                  if (++i >= nArgs)
                  {
                      System.err.println("ERROR: must supply arg for: "
                                         + optPrefix + Reporter.OPT_LOGGERS);
  
                      return false;
                  }
  
                  testProps.put(Reporter.OPT_LOGGERS, args[i]);
  
                  continue;
              }
  
              if (args[i].equalsIgnoreCase(optPrefix + Logger.OPT_LOGFILE))
              {
                  if (++i >= nArgs)
                  {
                      System.err.println("ERROR: must supply arg for: "
                                         + optPrefix + Logger.OPT_LOGFILE);
  
                      return false;
                  }
  
                  testProps.put(Logger.OPT_LOGFILE, args[i]);
  
                  continue;
              }
  
              // Boolean values are simple flags to switch from defaults only
              if (args[i].equalsIgnoreCase(optPrefix + Reporter.OPT_DEBUG))
              {
                  debug = true;
  
                  testProps.put(Reporter.OPT_DEBUG, "true");
  
                  continue;
              }
  
              if (args[i].equalsIgnoreCase(optPrefix
                                           + Reporter.OPT_PERFLOGGING))
              {
                  testProps.put(Reporter.OPT_PERFLOGGING, "true");
  
                  continue;
              }
  
              // Parse out the integer value
              //  This isn't strictly necessary since the catch-all 
              //  below should take care of it, but better safe than sorry
              if (args[i].equalsIgnoreCase(optPrefix
                                           + Reporter.OPT_LOGGINGLEVEL))
              {
                  if (++i >= nArgs)
                  {
                      System.err.println("ERROR: must supply arg for: "
                                         + optPrefix
                                         + Reporter.OPT_LOGGINGLEVEL);
  
                      return false;
                  }
  
                  try
                  {
                      testProps.put(Reporter.OPT_LOGGINGLEVEL, args[i]);
                  }
                  catch (NumberFormatException numEx)
                  { /* no-op */
                  }
  
                  continue;
              }
  
              // IF we are being called the first time on this 
              //  array of arguments, go ahead and process unknown ones
              //  otherwise, don't bother
              if (flag)
              {
  
                  // Found an arg that we don't know how to process,
                  //  so store it for any subclass' use as a catch-all
                  // If it starts with - dash, and another non-dash arg follows,
                  //  set as a name=value pair in the property block
                  if ((args[i].startsWith(optPrefix)) && (i + 1 < nArgs)
                          && (!args[i + 1].startsWith(optPrefix)))
                  {
  
                      // Scrub off the "-" prefix before setting the name
                      testProps.put(args[i].substring(1), args[i + 1]);
  
                      i++;  // Increment counter to skip next arg
                  }
  
                  // Otherwise, just set as name="" in the property block
                  else
                  {
  
                      // Scrub off the "-" prefix before setting the name
                      testProps.put(args[i].substring(1), "");
                  }
              }
          }  // end of for() loop
  
          debugPrintln(
              "FileBasedTest.initializeFromArray(): testProps are now:");
  
          if (debug)
              testProps.list(System.err);
  
          // If we got here, we set the array params OK, so simply return 
          //  the value the initializeFromProperties method returned
          return propsOK;
      }
  
      //-----------------------------------------------------
      //-------- Other useful and utility methods --------
      //-----------------------------------------------------
  
      /**
       * Create a TestfileInfo object from our paths.
       * From a single base filename, fill in fully qualified paths for
       * the inputName, outputName, goldName from our inputDir,
       * outputDir, goldDir.
        * Note: uses File.separatorChar as the path separator!
        * @author Shane Curcuru
        * @param basename base name of the file
       * @return TestfileInfo with *Name fields set
       */
      public TestfileInfo createTestfileInfo(String basename)
      {
  
          TestfileInfo t = new TestfileInfo();
  
          try
          {
              t.inputName = (new File(inputDir)).getCanonicalPath()
                            + File.separatorChar + basename;
          }
          catch (IOException ioe)
          {
              t.inputName = (new File(inputDir)).getAbsolutePath()
                            + File.separatorChar + basename;
          }
  
          try
          {
              t.outputName = (new File(outputDir)).getCanonicalPath()
                             + File.separatorChar + basename;
          }
          catch (IOException ioe)
          {
              t.outputName = (new File(outputDir)).getAbsolutePath()
                             + File.separatorChar + basename;
          }
  
          try
          {
              t.goldName = (new File(goldDir)).getCanonicalPath()
                           + File.separatorChar + basename;
          }
          catch (IOException ioe)
          {
              t.goldName = (new File(goldDir)).getAbsolutePath()
                           + File.separatorChar + basename;
          }
  
          return t;
      }
  
      /**
       * Debugging the Test infrastructure - dumps to System.err.  
       *
        * @param s String to print to System.err.
       */
      protected void debugPrintln(String s)
      {
  
          if (!debug)
              return;
  
          System.err.println(s);
      }
  
      /**
       * Main method to run test from the command line.
       * <p>Test subclasses <b>must</b> override, obviously.
       * Only provided here for debugging.</p>
       * @author Shane Curcuru
       *
        * @param args command line arguments.
       */
      public static void main(String[] args)
      {
  
          FileBasedTest app = new FileBasedTest();
  
          // Initialize any instance variables from the command line 
          //  OR specified properties block
          if (!app.initializeFromArray(args, true))
          {
              System.err.println("ERROR in usage:");
              System.err.println(app.usage());
  
              // Don't use System.exit, since that will blow away any containing harnesses
              return;
          }
  
          // Also pass along the command line, in case someone has 
          //  specific code that's counting on this
          app.testProps.put(MAIN_CMDLINE, args);
          app.runTest(app.testProps);
      }
  }  // end of class FileBasedTest
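   
   To illustrate the hooks above, a subclass might look like the sketch below.
   SampleFileTest is hypothetical (not part of this commit); in particular the
   testCase1() signature shown is an assumption, since the contract used by
   runTestCases()/Reporter.executeTests() is defined in TestImpl and Reporter
   rather than in this file.
   
   package org.apache.qetest;
   
   import java.util.Properties;
   
   /** Hypothetical sketch of a minimal FileBasedTest subclass. */
   public class SampleFileTest extends FileBasedTest
   {
       public SampleFileTest()
       {
           numTestCases = 1;
           testName = "SampleFileTest";
           testComment = "Skeleton showing the FileBasedTest hooks";
       }
   
       public boolean doTestFileInit(Properties p)
       {
           reporter.logTraceMsg("inputDir is: " + inputDir);
   
           return true;
       }
   
       // Assumed signature for a numbered test case method; see runTestCases()
       public boolean testCase1()
       {
           TestfileInfo info = createTestfileInfo("sample.xml");
   
           reporter.logTraceMsg("would process " + info.inputName
                                + " and compare against " + info.goldName);
   
           return true;
       }
   
       public static void main(String[] args)
       {
           SampleFileTest app = new SampleFileTest();
   
           if (!app.initializeFromArray(args, true))
           {
               System.err.println(app.usage());
   
               return;
           }
   
           app.runTest(app.getProperties());
       }
   }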
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/Logger.java
  
  Index: Logger.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * Logger.java
   *
   */
  package org.apache.qetest;
  
  import java.util.Hashtable;
  import java.util.Properties;
  
  /**
   * Interface defining a utility that can log out test results.
    * This interface defines a standalone utility that can be used
   * to report the results of a test.  It would commonly be used by a
   * testing utility library to produce actual output from the run
   * of a test file.
   * <p>The Logger defines a minimal interface for expressing the result
   * of a test in a generic manner.  Different Loggers can be written
   * to both express the results in different places (on a live console,
   * in a persistent file, over a network) and in different formats -
   * perhaps an XMLTestLogger would express the results in an
   * XML file or object.</p>
   * <p>In many cases, tests will actually call a Reporter, which
   * acts as a composite for Logger objects, and includes numerous
   * useful utility and convenience methods.</p>
   * <ul>Loggers explicitly have a restricted set of logging calls for
   * two main reasons:
   * <li>To help keep tests structured</li>
   * <li>To make it easier to generate 'reports' based on test output
   * (i.e. number of tests passed/failed, graphs of results, etc.)</li>
   * </ul>
   * @author Shane_Curcuru@lotus.com
   * @version $Id: Logger.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   * @todo maybe add set/getOutput methods? Would allow most
   * types of loggers to get output streams, etc. externally
   */
  public interface Logger
  {
  
      //-----------------------------------------------------
      //-------- Constants for common input params --------
      //-----------------------------------------------------
  
      /**
       * Parameter: FQCN of Logger(s) to use.
       * <p>Default: usually none, but implementers may choose to call
       * setupDefaultLogger(). Will accept multiple classnames separated
       * by ";" semicolon. Format:
        * <code>-loggers org.apache.qetest.ConsoleLogger;org.apache.qetest.SomeOtherLogger</code></p>
       */
      public static final String OPT_LOGGERS = "loggers";
  
       /** Separator between Logger classnames in OPT_LOGGERS values. */
      public static final String LOGGER_SEPARATOR = ";";
  
      /**
       * A default Logger classname - ConsoleLogger.
       */
      public static final String DEFAULT_LOGGER =
          "org.apache.qetest.ConsoleLogger";
  
      /**
       * Parameter: level of output to log, int 0-99.
       */
      public static final String OPT_LOGGINGLEVEL = "loggingLevel";
  
      /**
       * Parameter: if we should log performance data, true/false.
       */
      public static final String OPT_PERFLOGGING = "perfLogging";
  
      /**
       * Parameter: if we should dump debugging info to System.err.
       */
      public static final String OPT_DEBUG = "debug";
  
      /**
       * Parameter: Name of test results file for file-based Loggers.
       * <p>File-based loggers should use this key as an initializer
        * for the name of their output file.
       * Commandline Format: <code>-logFile path\to\ResultsFileName.ext</code>
       * Properties file Format: <code>logFile=path\\to\\ResultsFileName.ext</code></p>
       */
      public static final String OPT_LOGFILE = "logFile";
  
      /**
       * Parameter: Indent depth for console or HTML/XML loggers.
       * <p>Loggers may use this as an integer number of spaces to
       * indent, as applicable to their situation.
       * Commandline Format: <code>-indent <i>nn</i></code>
       * Properties file Format: <code>indent=<i>nn</i></code></p>
       */
      public static final String OPT_INDENT = "indent";
  
      //-----------------------------------------------------
      //-------- Constants for Logger and Reporter interactions --------
      //-----------------------------------------------------
  
      /**
       * This determines the amount of data actually logged out to results.
       * <p>Loggers merely use these constants in their output formats.
       * Reporters will only call contained Loggers to report messages
       * at the current logging level and higher.
       * For example, if you <code>setLoggingLevel(ERRORMSG)</code> then INFOMSGs
       * will not be reported, presumably speeding execution time and saving
       * output log space.  These levels are also coded into most Logger output,
       * allowing for easy reporting of various levels of results.</p>
       * <ul>Allowable values are:
       * <li>CRITICALMSG - Must-be-printed messages; may print only selected
       * fails (and skip printing most passes).</li>
       * <li>ERRORMSG - Logs an error and (optionally) a fail.</li>
       * <li>FAILSONLY - Skips logging out most pass messages (still
       * reports testFileResults) but prints out all fails.</li>
       * <li>WARNINGMSG - Used for non-fail warnings - the test will
        * continue, hopefully successfully.</li>
       * <li>STATUSMSG - Reports on basic status of the test, when you
       * want to include more detail than in a check() call</li>
       * <li>INFOMSG - For more basic script debugging messages.</li>
       * <li>TRACEMSG - Tracing all operations, detailed debugging information.</li>
       * </ul>
       * @see #logMsg(int, java.lang.String)
       */
      // Levels are separated in actual values in case you wish to add your own levels in between
      public static final int CRITICALMSG = 0;  // Lowest possible loggingLevel
  
      /** NEEDSDOC Field ERRORMSG          */
      public static final int ERRORMSG = 10;
  
      /** NEEDSDOC Field FAILSONLY          */
      public static final int FAILSONLY = 20;
  
      /** NEEDSDOC Field WARNINGMSG          */
      public static final int WARNINGMSG = 30;
  
      /** NEEDSDOC Field STATUSMSG          */
      public static final int STATUSMSG = 40;
  
      /** NEEDSDOC Field INFOMSG          */
      public static final int INFOMSG = 50;
  
      /** NEEDSDOC Field TRACEMSG          */
      public static final int TRACEMSG = 60;  // Highest possible loggingLevel
  
      /** NEEDSDOC Field DEFAULT_LOGGINGLEVEL          */
      public static final int DEFAULT_LOGGINGLEVEL = STATUSMSG;
  
      /**
       * Constants for tracking results by testcase or testfile.
       * <p>Testfiles default to an incomplete or INCP_RESULT.  If a
       * test never successfully calls a check* method, its result
       * will be incomplete.</p>
       * <p>Note that a test cannot explicitly reset its result to be INCP.</p>
       */
  
      // Note: implementations should never rely on the actual values
      //       of these constants, except possibly to ensure that 
      //       overriding values are greater than other values
      public static final int INCP_RESULT = 0;
  
      // Note: string representations are explicitly set to all be 
      //       4 characters long to make it simpler to parse results
  
      /** String equivalent of INCP_RESULT. */
      public static final String INCP = "Incp";
  
      /**
       * Constants for tracking results by testcase or testfile.
       * <p>A PASS_RESULT signifies that a specific test point (or a testcase,
       * or testfile) has performed an operation correctly and has been verified.</p>
       * <p>A pass overrides an incomplete.</p>
       * @see #checkPass(java.lang.String)
       */
      public static final int PASS_RESULT = 2;
  
      /** String equivalent of PASS_RESULT. */
      public static final String PASS = "Pass";
  
      /**
       * Constants for tracking results by testcase or testfile.
       * <p>An AMBG_RESULT or ambiguous result signifies that a specific test
       * point (or a testcase, or testfile) has performed an operation but
       * that it has not been verified.</p>
       * <p>Ambiguous results can be used when the test may not have access
       * to baseline, or verified 'gold' result data.  It may also be used
       * during test file creation when the tester has not yet specified the
       * expected behavior of a test.</p>
       * <p>Ambiguous overrides both pass and incomplete.</p>
       * @see #checkAmbiguous(java.lang.String)
       */
      public static final int AMBG_RESULT = 5;
  
      /** String equivalent of AMBG_RESULT. */
      public static final String AMBG = "Ambg";
  
      /**
       * Constants for tracking results by testcase or testfile.
       * <p>A FAIL_RESULT signifies that a specific test point (or a testcase,
       * or testfile) has performed an operation incorrectly.</p>
       * <p>A fail in one test point does not necessarily mean that other test
       * points are invalid - well written tests should be able to continue
       * and produce valid results for the rest of the test file.</p>
       * <p>A fail overrides any of incomplete, pass or ambiguous.</p>
       * @see #checkFail(java.lang.String)
       */
      public static final int FAIL_RESULT = 8;
  
      /** String equivalent of FAIL_RESULT. */
      public static final String FAIL = "Fail";
  
      /**
       * Constants for tracking results by testcase or testfile.
       * <p>An ERRR_RESULT signifies that some part of the testfile
       * has caused an unexpected error, exception, or other Really Bad Thing.</p>
       * <p>Errors signify that something unexpected happened and that the test
       * may not produce valid results.  It would most commonly be used for
       * problems relating to setting up test data or errors with other software
       * being used (i.e. not problems with the actual software code that the
       * test is attempting to verify).</p>
       * <p>An error overrides <B>any</B> other result.</p>
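       * <p>Because these result constants increase numerically with severity,
       * an overall result can be rolled up as the maximum of its parts; an
       * illustrative sketch:</p>
       * <pre>
       *   testResult = Math.max(testResult, caseResult);
       * </pre>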
       * @see #checkErr(java.lang.String)
       */
      public static final int ERRR_RESULT = 9;
  
      /** String equivalent of ERRR_RESULT. */
      public static final String ERRR = "Errr";
  
      /**
       * Testfiles and testcases should default to incomplete.
       */
      public final int DEFAULT_RESULT = INCP_RESULT;
  
      //-----------------------------------------------------
      //-------- Control and utility routines --------
      //-----------------------------------------------------
  
      /**
       * Return a description of what this Logger/Reporter does.
       * @author Shane_Curcuru@lotus.com
       * @return description of how this Logger outputs results, OR
       * how this Reporter uses Loggers, etc..
       */
      public abstract String getDescription();
  
      /**
       * Returns information about the Property name=value pairs that
       * are understood by this Logger/Reporter.
       * @author Shane_Curcuru@lotus.com
       * @return same as {@link java.applet.Applet#getParameterInfo()}.
       */
      public abstract String[][] getParameterInfo();
  
      /**
       * Accessor methods for a properties block.
       * @return our Properties block.
       */
      public abstract Properties getProperties();
  
      /**
       * Accessor methods for a properties block.
       * Always having a Properties block allows users to pass common
       * options to a Logger/Reporter without having to know the specific
       * 'properties' on the object.
       * <p>Much like in Applets, users can call getParameterInfo() to
       * find out what kind of properties are available.  Callers more
       * commonly simply call initialize(p) instead of setProperties(p)</p>
       * @author Shane_Curcuru@lotus.com
       * @param p Properties to set (should be cloned).
       */
      public abstract void setProperties(Properties p);
  
      /**
       * Call once to initialize this Logger/Reporter from Properties.
       * <p>Simple hook to allow Logger/Reporters with special output
       * items to initialize themselves.</p>
       * @author Shane_Curcuru@lotus.com
       * @param p Properties block to initialize from.
       * @return status: true if OK, false if an error occurred.
       */
      public abstract boolean initialize(Properties p);
  
      /**
       * Is this Logger/Reporter ready to log results?
       * @author Shane_Curcuru@lotus.com
       * @return status - true if it's ready to report, false otherwise
       */
      public abstract boolean isReady();
  
      /**
       * Is this Logger/Reporter still running OK?
       * Note that, as with java.io.PrintWriter, this class should never
       * throw exceptions.  It will merely fail quietly and set this
       * error flag when something bad happens.
       * <p>Note this may have slightly different meanings for various
       * types of Loggers (file-based, network-based, etc.) or for
       * Reporters (which may still be able to function if only one
       * of their Loggers has an error).</p>
       * @author Shane_Curcuru@lotus.com
       * @return status - true if an error has occurred, false if it's OK
       */
      public abstract boolean checkError();
  
      /**
       * Flush this Logger/Reporter - should ensure all output is flushed.
       * Note that the flush operation is not necessarily pertinent to
       * all types of Logger/Reporter - console-type Loggers no-op this.
       * @author Shane_Curcuru@lotus.com
       */
      public abstract void flush();
  
      /**
       * Close this Logger/Reporter - should include closing any OutputStreams, etc.
       * Logger/Reporters should return isReady() = false after closing.
       * @author Shane_Curcuru@lotus.com
       */
      public abstract void close();
  
      //-----------------------------------------------------
      //-------- Testfile / Testcase start and stop routines --------
      //-----------------------------------------------------
  
      /**
       * Report that a testfile has started.
       * Implementing Loggers must output/store/report a message
       * that the test file has started.
       * @author Shane_Curcuru@lotus.com
       * @param name file name or tag specifying the test.
       * @param comment comment about the test.
       */
      public abstract void testFileInit(String name, String comment);
  
      /**
       * Report that a testfile has finished, and report its result.
       * Implementing Loggers must output a message that the test is
       * finished, and print the results.
       * @author Shane_Curcuru@lotus.com
       * @param msg message to log out
       * @param result result of testfile
       */
      public abstract void testFileClose(String msg, String result);
  
      /**
       * Report that a testcase has started.
       * @author Shane_Curcuru@lotus.com
       * @param comment short description of this test case's objective.
       */
      public abstract void testCaseInit(String comment);
  
      /**
       * Report that a testcase has finished, and report its result.
       * Implementing classes must output a message that a testcase is
       * finished, and print the results.
       * @author Shane_Curcuru@lotus.com
       * @param msg message or name of the test case to log out
       * @param result result of testfile
       */
      public abstract void testCaseClose(String msg, String result);
  
      //-----------------------------------------------------
      //-------- Test results logging routines --------
      //-----------------------------------------------------
  
      /**
       * Report a comment to result file with specified severity.
       * Print out the message, optionally along with the level (depends
       * on your storage mechanism: console output probably doesn't need
       * the level, but a file output probably would want it).
       * <p>Note that some Loggers may limit the comment string,
       * either in overall length or by stripping any linefeeds, etc.
       * This is to allow for optimization of file or database-type
       * reporters with fixed fields.  Users who need to log out
       * special string data should use logArbitrary() instead.</p>
       * <p>Remember, use {@link #checkPass(String)}, or
       * {@link #checkFail(String)}, etc. to report the actual
       * results of your tests.</p>
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param msg comment to log out.
       */
      public abstract void logMsg(int level, String msg);
  
      /**
       * Report an arbitrary String to result file with specified severity.
       * Log out the String provided exactly as-is.
       * @author Shane_Curcuru@lotus.com
       * @param level severity or class of message.
       * @param msg arbitrary String to log out.
       */
      public abstract void logArbitrary(int level, String msg);
  
      /**
       * Logs out statistics to result file with specified severity.
       * This is a general-purpose way to log out numeric statistics.  We accept
       * both a long and a double to allow users to save whatever kind of numbers
       * they need to, with the simplest API.  The actual meanings of the numbers
       * are dependent on the implementer.
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param lVal statistic in long format.
       * @param dVal statistic in double format.
       * @param msg comment to log out.
       */
      public abstract void logStatistic(int level, long lVal, double dVal,
                                        String msg);
  
      /**
       * Logs out an element to results with specified severity.
       * This method is primarily for Loggers that output to fixed
       * structures, like files, XML data, or databases.
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param element name of enclosing element
       * @param attrs hash of name=value attributes
       * @param msg Object to log out; up to Loggers to handle
       * processing of this; usually logs just .toString().
       */
      public abstract void logElement(int level, String element,
                                      Hashtable attrs, Object msg);
  
      /**
       * Logs out contents of a Hashtable with specified severity.
       * <p>Loggers should store or log the full contents of the hashtable.</p>
       * @param level severity or class of message.
       * @param hash Hashtable to log the contents of.
       * @param msg description of the Hashtable.
       */
      public abstract void logHashtable(int level, Hashtable hash, String msg);
  
      //-----------------------------------------------------
      //-------- Test results reporting check* routines --------
      //-----------------------------------------------------
      // There is no public void checkIncp(String comment) method
  
      /**
       * Writes out a Pass record with comment.
       * @author Shane_Curcuru@lotus.com
       * @param comment comment to log with the pass record.
       */
      public abstract void checkPass(String comment);
  
      /**
       * Writes out an ambiguous record with comment.
       * @author Shane_Curcuru@lotus.com
       * @param comment comment to log with the ambiguous record.
       */
      public abstract void checkAmbiguous(String comment);
  
      /**
       * Writes out a Fail record with comment.
       * @author Shane_Curcuru@lotus.com
       * @param comment comment to log with the fail record.
       */
      public abstract void checkFail(String comment);
  
      /**
       * Writes out an Error record with comment.
       * @author Shane_Curcuru@lotus.com
       * @param comment comment to log with the error record.
       */
      public abstract void checkErr(String comment);
  }  // end of interface Logger
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/OutputNameManager.java
  
  Index: OutputNameManager.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * OutputNameManager.java
   *
   */
  package org.apache.qetest;
  
  /**
   * Simple utility class to manage tests with multiple output names.
   * <p>Starts with a base name and extension, and returns
   * nextName()s like:<br>
   * baseName_1.ext<br>
   * baseName_2.ext<br>
   * baseName_3.ext<br>
   * ...<br>
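   * <p>An illustrative usage sketch (the names shown are hypothetical):</p>
   * <pre>
   *   OutputNameManager outNames = new OutputNameManager("myTest", ".xml");
   *   String first = outNames.nextName();   // "myTest_1.xml"
   *   String second = outNames.nextName();  // "myTest_2.xml"
   * </pre>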
   * @author Shane_Curcuru@lotus.com
   * @version $Id: OutputNameManager.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   */
  public class OutputNameManager
  {
  
      // defaults are provided for everything for the terminally lazy
  
      /** Extension appended to each generated name; default ".out". */
      protected String extension = ".out";
  
      /** Base portion of each generated name; default "OutputFile". */
      protected String baseName = "OutputFile";
  
      /** Most recently generated name. */
      protected String currentName = "currentUnset";
  
      /** Name generated before the current one. */
      protected String previousName = "previousUnset";
  
      /** Counter appended between the baseName and the extension. */
      protected int counter = 0;
  
      /** Separator between the baseName and the counter. */
      public static final String SEPARATOR = "_";
  
      /**
       * Construct with just a basename.
       * @param base base portion of generated output names.
       */
      public OutputNameManager(String base)
      {
          baseName = base;
      }
  
      /**
       * Construct with a basename and extension.
       * @param base base portion of generated output names.
       * @param ext extension to append to each name.
       */
      public OutputNameManager(String base, String ext)
      {
          baseName = base;
          extension = ext;
      }
  
      /**
       * Construct with a basename, extension, and set the counter.
       * @param base base portion of generated output names.
       * @param ext extension to append to each name.
       * @param ctr initial value for the counter.
       */
      public OutputNameManager(String base, String ext, int ctr)
      {
  
          baseName = base;
          extension = ext;
  
          setCounter(ctr);
      }
  
      /** Reset the counter to zero and update current, previous names. */
      public void reset()
      {
  
          previousName = currentName;
          currentName = null;
          counter = 0;  // Set to 0 since we always call nextName() first
      }
  
      /**
       * Increment counter and get next name.
       * @return the newly generated current name.
       */
      public String nextName()
      {
  
          // Note: must compute counter + 1 directly; a post-increment here
          //  would be overwritten by the assignment inside setCounter()
          setCounter(counter + 1);  // Updates names
  
          return currentName();
      }
  
      /**
       * Just get the current name.
       * @return the current name.
       */
      public String currentName()
      {
          return currentName;
      }
  
      /**
       * Get the previous name, even past a reset().
       * @return the previous name.
       */
      public String previousName()
      {
          return previousName;
      }
  
      /**
       * Get the current counter number.
       * @return the current counter value.
       */
      public int currentCounter()
      {
          return counter;
      }
  
      /**
       * Set the current counter number, updating the names accordingly.
       * @param ctr new value for the counter.
       */
      public void setCounter(int ctr)
      {
  
          counter = ctr;
          previousName = currentName;
          currentName = baseName + SEPARATOR + counter + extension;
      }
  }  // end of class OutputNameManager
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/package.html
  
  Index: package.html
  ===================================================================
  <html>
    <head>
      <title>XSL-TEST Reporter package.</title>
    </head>
    <body>
      <p>This package is an independent framework for writing automated test scripts in Java.</p>  
      <dl>
        <dt><b>Author: </b></dt><dd><a href="mailto:shane_curcuru@lotus.com">Shane_Curcuru@lotus.com</a></dd>
        <dt><b>Program(s) Under Test: </b></dt>
        <dd><a href="http://xml.apache.org/xalan-j" target="_top">Xalan-J 2.x XSLT Processor</a></dd>
        <dd><a href="http://xml.apache.org/xalan" target="_top">Xalan-J 1.x XSLT Processor</a></dd>
        <dd><a href="http://xml.apache.org/xalan-c" target="_top">Xalan-C 1.x XSLT Processor</a></dd>
        <dt><b>Goals: </b></dt><dd>
          <ul>
            <li>Provide a solid, independent test framework.</li>
            <li>Encourage good testing/verification practices.</li>
            <li>Enable quicker generation of Xalan test cases.</li>
            <li>Simplify maintenance of test cases.</li>
            <li>Provide basic test results analysis frameworks.</li>
          </ul>
        </dd>
      </dl>
      <p>This package is primarily focused on the quality 
      engineer, and system or integration level tests that are to be 
      shared with a larger audience, rather than on a developer who 
      writes unit tests primarily for their own use.</p>
      <ul>A few of the basic design patterns/principles used:
      <li>Most objects can be initialized either through their 
      constructor or an initialize() method that takes a Properties 
      block of name=value pairs to set up their internal state 
      (see the sketch after this list).  Composite objects will typically 
      pass their entire Properties block to sub-objects or contained 
      objects for their own initialization.  One future drawback: we need to 
      ensure the property namespace doesn't have collisions between tests, 
      reporters, and loggers.</li>
      <li>Test, TestImpl, FileBasedTest: these all provide structure 
      and utility methods useful for testing in general.</li>
      <li>User subclasses of the Test classes should simply focus on 
      manipulating the product under test and calling log*() or check*() 
      methods to report information.  They shouldn't worry about the 
      external environment or managing their reporter unless they have 
      a specific reason to.</li>
      <li>Loggers simply provide a mechanism to output data so 
      that the test doesn't have to manage the output at all.  They 
      ensure that all tests produce output in a common format, making it 
      easier to evaluate test results across many tests or products.  
      Loggers generally don't keep track of the test's result state, 
      relying on the user to analyze the result set later.</li>
      <li>Reporters act as a composite of Loggers, as well as providing 
      various useful utilities.  Reporters also keep a running tally of 
      the pass/fail state of a Test during execution and report 
      it out using their Loggers.</li>
      <li>CheckService is a generic service for checking 'equivalence' 
      of two objects and reporting the pass/fail/other result thereof.  
      A SimpleFileCheckService implementation is provided as an 
      example.</li>
      <li>OutputNameManager is a cheap-o helper for tests that create 
      a large number of consecutive output files.</li>
      <li>TestfileInfo is a simple data-holding class to store info 
      about a test data file.  It is used in FileBasedTest, which may 
      be a useful base class for your tests.</li>
      </ul>
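      <p>For example, the Properties-based initialization pattern described 
      in the first item above looks roughly like this (an illustrative 
      sketch only; the property values shown are hypothetical):</p>
      <pre>
      Properties props = new Properties();
      props.put(Logger.OPT_LOGFILE, "MyResults.xml"); // used by file-based Loggers
      props.put(Logger.OPT_DEBUG, "true");            // generic debugging flag
      Reporter reporter = new Reporter(props);        // whole block is passed to its Loggers
      reporter.addDefaultLogger();                    // ensure at least one Logger exists
      </pre>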
    </body>
  </html>
  
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/Reporter.java
  
  Index: Reporter.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * Reporter.java
   *
   */
  package org.apache.qetest;
  
  import java.io.PrintWriter;
  import java.io.StringWriter;
  
  import java.lang.reflect.Constructor;
  import java.lang.reflect.Method;
  import java.lang.reflect.InvocationTargetException;
  
  import java.util.Hashtable;
  import java.util.Properties;
  import java.util.StringTokenizer;
  
  /**
   * Class defining how a test can report results including convenience methods.
   * <p>Tests generally interact with a Reporter, which turns around to call
   * a Logger to actually store the results.  The Reporter serves as a
   * single funnel for all results, hiding both the details and number of
   * actual loggers that might currently be turned on (file, screen, network,
   * etc.) from the test that created us.</p>
   * <p>Note that Reporter adds numerous convenience methods that, while they
   * are not strictly necessary to express a test's results, make coding
   * tests much easier.  Reporter is designed to be subclassed for your
   * particular application; in general you only need to provide setup mechanisms
   * specific to your testing/product environment.</p>
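   * <p>A minimal driving sketch (illustrative only; assumes the default
   * Logger provides acceptable console output):</p>
   * <pre>
   *   Reporter reporter = new Reporter(new Properties());
   *   reporter.addDefaultLogger();
   *   reporter.testFileInit("MyTest", "Example test file");
   *   reporter.testCaseInit("First test case");
   *   reporter.checkPass("something worked");
   *   reporter.testCaseClose();
   *   reporter.testFileClose();
   * </pre>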
   * @todo all methods should check that available loggers are OK
   * @todo explain better how results are rolled up and calculated
   * @author Shane_Curcuru@lotus.com
   * @author Jo_Grant@lotus.com
   * @version $Id: Reporter.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   */
  public class Reporter implements Logger
  {
  
      /**
       * Constructor calls initialize(p).
       * @param p Properties block to initialize us with.
       */
      public Reporter(Properties p)
      {
          ready = initialize(p);
      }
  
      /** If we're ready to start outputting yet. */
      protected boolean ready = false;
  
      //-----------------------------------------------------
      //-------- Implement Logger Control and utility routines --------
      //-----------------------------------------------------
  
      /**
       * Return a description of what this Logger/Reporter does.
       * @author Shane_Curcuru@lotus.com
       * @return description of how this Logger outputs results, OR
       * how this Reporter uses Loggers, etc..
       */
      public String getDescription()
      {
          return "Reporter: default reporter implementation";
      }
  
      /**
       * Returns information about the Property name=value pairs that
       * are understood by this Logger/Reporter.
       * @author Shane_Curcuru@lotus.com
       * @return same as {@link java.applet.Applet#getParameterInfo()}.
       */
      public String[][] getParameterInfo()
      {
  
          String pinfo[][] =
          {
              { OPT_LOGGERS, "String", "FQCN of Loggers to add" },
              { OPT_LOGFILE, "String",
                "Name of file to use for file-based Logger output" },
              { OPT_LOGGINGLEVEL, "int",
                "to setLoggingLevel() to control amount of output" },
              { OPT_PERFLOGGING, "boolean",
                "if we should log performance data as well" },
              { OPT_INDENT, "int",
                "number of spaces to indent for supporting Loggers" },
              { OPT_DEBUG, "boolean", "generic debugging flag" }
          };
  
          return pinfo;
      }
  
      /**
       * Accessor methods for a properties block.
       * @return our Properties block.
       * @todo should this clone first?
       */
      public Properties getProperties()
      {
          return reporterProps;
      }
  
      /**
       * Accessor methods for a properties block.
       * Always having a Properties block allows users to pass common
       * options to a Logger/Reporter without having to know the specific
       * 'properties' on the object.
       * <p>Much like in Applets, users can call getParameterInfo() to
       * find out what kind of properties are available.  Callers more
       * commonly simply call initialize(p) instead of setProperties(p)</p>
       * @author Shane_Curcuru@lotus.com
       * @param p Properties to set (should be cloned).
       */
      public void setProperties(Properties p)
      {
          if (p != null)
              reporterProps = (Properties) p.clone();
      }
  
      /**
       * Call once to initialize this Logger/Reporter from Properties.
       * <p>Simple hook to allow Logger/Reporters with special output
       * items to initialize themselves.</p>
       * @author Shane_Curcuru@lotus.com
       * @param p Properties block to initialize from.
       * @return status: true if OK, false if an error occurred.
       */
      public boolean initialize(Properties p)
      {
  
          setProperties(p);
  
          String dbg = reporterProps.getProperty(OPT_DEBUG);
  
          if ((dbg != null) && dbg.equalsIgnoreCase("true"))
          {
              setDebug(true);
          }
  
          String perf = reporterProps.getProperty(OPT_PERFLOGGING);
  
          if ((perf != null) && perf.equalsIgnoreCase("true"))
          {
              setPerfLogging(true);
          }
  
          // int values need to be parsed
          String logLvl = reporterProps.getProperty(OPT_LOGGINGLEVEL);
  
          if (logLvl != null)
          {
              try
              {
                  setLoggingLevel(Integer.parseInt(logLvl));
              }
              catch (NumberFormatException numEx)
              { /* no-op */
              }
          }
  
          // Add however many loggers are asked for
          // Guard against a missing loggers property; callers may add
          //  Loggers later or rely on addDefaultLogger() instead
          boolean b = true;
          String loggerNames = reporterProps.getProperty(OPT_LOGGERS);
  
          if (loggerNames != null)
          {
              StringTokenizer st = new StringTokenizer(loggerNames,
                                                       LOGGER_SEPARATOR);
  
              while (st.hasMoreTokens())
              {
                  String temp = st.nextToken();
  
                  if ((temp != null) && (temp.length() > 1))
                  {
                      b &= addLogger(temp, reporterProps);
                  }
              }
          }
  
          return true;
      }
  
      /**
       * Is this Logger/Reporter ready to log results?
       * @author Shane_Curcuru@lotus.com
       * @return status - true if it's ready to report, false otherwise
       * @todo should we check our contained Loggers for their status?
       */
      public boolean isReady()
      {
          return ready;
      }
  
      /**
       * Is this Logger/Reporter still running OK?
       * @author Shane_Curcuru@lotus.com
       * @return status - true if an error has occurred, false if it's OK
       * @todo should we check our contained Loggers for their status?
       */
      public boolean checkError()
      {
          return false;
      }
  
      /**
       * Flush this Logger/Reporter - should ensure all output is flushed.
       * Note that the flush operation is not necessarily pertinent to
       * all types of Logger/Reporter - console-type Loggers no-op this.
       * @author Shane_Curcuru@lotus.com
       */
      public void flush()
      {
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].flush();
          }
      }
  
      /**
       * Close this Logger/Reporter - should include closing any OutputStreams, etc.
       * Logger/Reporters should return isReady() = false after closing.
       * @author Shane_Curcuru@lotus.com
       */
      public void close()
      {
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].close();
          }
      }
  
      /**
       * Generic properties for this Reporter.
       * <p>Use a Properties block to make it easier to add new features
       * and to be able to pass data to our loggers.  Any properties that
       * we recognize will be set here, and the entire block will be passed
       * to any loggers that we control.</p>
       */
      protected Properties reporterProps = new Properties();
  
      /**
       * This determines the amount of data actually logged out to results.
       * <p>Setting this higher will result in more data being logged out.
       * Values range from Reporter.CRITICALMSG (0) to TRACEMSG (60).
       * For non-performance-critical testing, you may wish to set this high,
       * so all data gets logged, and then use reporting tools on the test output
       * to filter for human use (since the appropriate level is stored with
       * every logMsg() call)</p>
       * @see #logMsg(int, java.lang.String)
       */
      protected int loggingLevel = DEFAULT_LOGGINGLEVEL;
  
      /**
       * Marker that a testcase is currently running.
       * <p>NEEDSWORK: should do a better job of reporting results in cases
       * where users might not call testCaseInit/testCaseClose in non-nested pairs.</p>
       */
      protected boolean duringTestCase = false;
  
      /**
       * Flag if we should force loggers closed upon testFileClose.
       * <p>Default: true.  Standalone tests can leave this alone.
       * Test Harnesses may want to reset this so they can have multiple
       * file results in one actual output 'file' for file-based loggers.</p>
       */
      protected boolean closeOnFileClose = true;
  
      /**
       * Accessor method for closeOnFileClose.
       * @return true if Loggers are closed on testFileClose(), false otherwise.
       */
      public boolean getCloseOnFileClose()
      {
          return closeOnFileClose;
      }
  
      /**
       * Accessor method for closeOnFileClose.
       * @param b true to force Loggers closed on testFileClose(), false otherwise.
       */
      public void setCloseOnFileClose(boolean b)
      {
          closeOnFileClose = b;
      }
  
      //-----------------------------------------------------
      //-------- Test results computation members and methods --------
      //-----------------------------------------------------
  
      /** Name of the current test. */
      protected String testName;
  
      /** Description of the current test. */
      protected String testComment;
  
      /** Number of current case within a test, usually automatically calculated. */
      protected int caseNum;
  
      /** Description of current case within a test. */
      protected String caseComment;
  
      /** Overall test result of current test, automatically calculated. */
      protected int testResult;
  
      /** Overall test result of current testcase, automatically calculated. */
      protected int caseResult;
  
      /**
       * Counter indexes for overall numbers of results - passes, fails, etc.
       * This index is for results tallied per testfile.
       * @todo update this if we use TestResult objects
       */
      protected static final int FILES = 0;
  
      /** Counter index for results tallied per testcase. */
      protected static final int CASES = 1;
  
      /** Counter index for results tallied per individual check. */
      protected static final int CHECKS = 2;
  
      /** Total number of result counter indexes. */
      protected static final int MAX_COUNTERS = CHECKS + 1;
  
      /**
       * Counters for overall number of results - passes, fails, etc.
       * @todo update this if we use TestResult objects
       */
      protected int[] incpCount = new int[MAX_COUNTERS];
  
      /** Counters of pass results, indexed by FILES/CASES/CHECKS. */
      protected int[] passCount = new int[MAX_COUNTERS];
  
      /** Counters of ambiguous results, indexed by FILES/CASES/CHECKS. */
      protected int[] ambgCount = new int[MAX_COUNTERS];
  
      /** Counters of fail results, indexed by FILES/CASES/CHECKS. */
      protected int[] failCount = new int[MAX_COUNTERS];
  
      /** Counters of error results, indexed by FILES/CASES/CHECKS. */
      protected int[] errrCount = new int[MAX_COUNTERS];
  
      //-----------------------------------------------------
      //-------- Composite Pattern Variables And Methods --------
      //-----------------------------------------------------
  
      /**
       * Optimization: max number of loggers, stored in an array.
       * <p>This is a design decision: normally, you might use a ConsoleReporter,
       * some sort of file-based one, and maybe a network-based one.</p>
       */
      protected int MAX_LOGGERS = 3;
  
      /**
       * Array of loggers to whom we pass results.
       * <p>Store our loggers in an array for optimization, since we want
       * logging calls to take as little time as possible.</p>
       */
      protected Logger[] loggers = new Logger[MAX_LOGGERS];
  
      /** Number of Loggers currently held in the loggers array. */
      protected int numLoggers = 0;
  
      /**
       * Add a new Logger to our array, optionally initializing it with Properties.
       * <p>Store our Loggers in an array for optimization, since we want
       * logging calls to take as little time as possible.</p>
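       * <p>For example (an illustrative sketch only, assuming an
       * XMLFileLogger class exists in this package and that reporterProps
       * holds whatever options that Logger needs):</p>
       * <pre>
       *   reporter.addLogger("org.apache.qetest.XMLFileLogger", reporterProps);
       * </pre>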
       * @todo enable users to add more than MAX_LOGGERS
       * @author Gang Of Four
       * @param rName fully qualified class name of Logger to add.
       * @param p (optional) Properties block to initialize the Logger with.
       * @return status - true if successful, false otherwise.
       */
      public boolean addLogger(String rName, Properties p)
      {
  
          if ((rName == null) || (rName.length() < 1))
              return false;
  
          debugPrintln("addLogger(" + numLoggers + ", " + rName + " ...)");
  
          if ((numLoggers + 1) > loggers.length)
          {
  
              // @todo enable users to add more than MAX_LOGGERS
              return false;
          }
  
          // Attempt to add Logger to our list
          Class rClass;
          Constructor rCtor;
  
          try
          {
              rClass = Class.forName(rName);
  
              debugPrintln("rClass is " + rClass.toString());
  
              if (p == null)
  
              // @todo should somehow pass along our own props as well
              // Need to ensure Reporter and callers of this method always 
              //  coordinate the initialization of the Loggers we hold
              {
                  loggers[numLoggers] = (Logger) rClass.newInstance();
              }
              else
              {
                  Class[] parameterTypes = new Class[1];
  
                  parameterTypes[0] = java.util.Properties.class;
                  rCtor = rClass.getConstructor(parameterTypes);
  
                  Object[] initArgs = new Object[1];
  
                  initArgs[0] = (Object) p;
                  loggers[numLoggers] = (Logger) rCtor.newInstance(initArgs);
              }
          }
          catch (Exception e)
          {
  
              // @todo should we inform user why it failed?
              // Note: the logMsg may fail since we might not have any reporters at this point!
              debugPrintln("addLogger exception: " + e.toString());
              logCriticalMsg("addLogger exception: " + e.toString());
              logThrowable(CRITICALMSG, e, "addLogger exception:");
  
              return false;
          }
  
          // Increment counter for later use
          numLoggers++;
  
          return true;
      }
  
      /**
       * Return a Hashtable of all active Loggers.
       * @todo revisit; perhaps use a Vector
       * @return Hashtable of all active Loggers; null if none
       */
      public Hashtable getLoggers()
      {
  
          // Optimization
          if (numLoggers == 0)
              return (null);
  
          Hashtable temp = new Hashtable();
  
          for (int i = 0; i < numLoggers; i++)
          {
              temp.put(loggers[i].getClass().getName(), loggers[i]);
          }
  
          return temp;
      }
  
      /**
       * Add the default Logger to this Reporter, whatever it is.
       * <p>Only adds the Logger if numLoggers &lt;= 0; if the user has already
       * setup another Logger, this is a no-op (for the testwriter who doesn't
       * want the performance hit or annoyance of having Console output)</p>
       * @author Gang Of Four
       * @return status - true if successful, false otherwise.
       */
      public boolean addDefaultLogger()
      {
  
          // Optimization - return true, since they already have a logger
          if (numLoggers > 0)
              return true;
  
          return addLogger(DEFAULT_LOGGER, reporterProps);
      }
  
      //-----------------------------------------------------
      //-------- Testfile / Testcase start and stop routines --------
      //-----------------------------------------------------
  
      /**
       * Call once to initialize your Loggers for your test file.
       * Also resets test name, result, case results, etc.
       * <p>Currently, you must init/close your test file before init/closing
       * any test cases.  No checking is currently done to ensure that
       * mismatched test files are not nested.  This is an area that needs
       * design decisions and some work eventually to be a really clean design.</p>
       * <p>Not only do nested testfiles/testcases have implications for good
       * testing practices, they may also have implications for various Loggers,
       * especially XML or other ones with an implicit hierarchy in the reports.</p>
       * @author Shane_Curcuru@lotus.com
       * @param name file name or tag specifying the test.
       * @param comment comment about the test.
       */
      public void testFileInit(String name, String comment)
      {
  
          testName = name;
          testComment = comment;
          testResult = DEFAULT_RESULT;
          caseNum = 0;
          caseComment = null;
          caseResult = DEFAULT_RESULT;
          duringTestCase = false;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].testFileInit(testName, testComment);
          }
  
          // Log out time whole test script starts
          // Note there is a slight delay while logPerfMsg calls all reporters
          long t = System.currentTimeMillis();
  
          logPerfMsg(TEST_START, t, testName);
      }
  
      /**
       * Call once to close out your test and summate results.
       * <p>Will close an open testCase before closing the file.  May also
       * force all Loggers closed if getCloseOnFileClose() is set (which may
       * imply that no more output will be logged to file-based reporters).</p>
       * @author Shane_Curcuru@lotus.com
       * @todo make this settable as to how/where the resultsCounters get output
       */
      public void testFileClose()
      {
  
          // Cache the time whole test script ends
          long t = System.currentTimeMillis();
  
          if (duringTestCase)
          {
  
              // Either user messed up (forgot to call testCaseClose) or something went wrong
              logErrorMsg("WARNING! testFileClose when duringTestCase=true!");
  
              // Force call to testCaseClose()
              testCaseClose();
          }
  
          // Actually log the time the test script ends after closing any potentially open testcases
          logPerfMsg(TEST_STOP, t, testName);
  
          // Increment our results counters 
          incrementResultCounter(FILES, testResult);
  
          // Print out an overall count of results by type
          // @todo make this settable as to how/where the resultsCounters get output
          logResultsCounters();
  
          // end this testfile - finish up any reporting we need to
          for (int i = 0; i < numLoggers; i++)
          {
  
              // Log we're done and then flush
              loggers[i].testFileClose(testComment, resultToString(testResult));
              loggers[i].flush();
  
              // Only close each reporter if asked to; this implies we're done
              //  and can't perform any more logging ourselves (or our reporters)
              if (getCloseOnFileClose())
              {
                  loggers[i].close();
              }
          }
  
          // Note: explicitly leave testResult, caseResult, etc. set for debugging
          //       purposes or for use by external test harnesses
      }
  
      /**
       * Implement Logger-only method.
       * <p>Here, a Reporter is simply acting as a logger: so don't
       * summate any results, do performance measuring, or anything
       * else, just pass the call through to our Loggers.
       * @param msg message to log out
       * @param result result of testfile
       */
      public void testFileClose(String msg, String result)
      {
  
          if (duringTestCase)
          {
  
              // Either user messed up (forgot to call testCaseClose) or something went wrong
              logErrorMsg("WARNING! testFileClose when duringTestCase=true!");
  
              // Force call to testCaseClose()
              testCaseClose();
          }
  
          // end this testfile - finish up any reporting we need to
          for (int i = 0; i < numLoggers; i++)
          {
  
              // Log we're done and then flush
              loggers[i].testFileClose(testComment, resultToString(testResult));
              loggers[i].flush();
  
              // Only close each reporter if asked to; this implies we're done
              //  and can't perform any more logging ourselves (or our reporters)
              if (getCloseOnFileClose())
              {
                  loggers[i].close();
              }
          }
      }
  
      /**
       * Call once to start each test case; logs out testcase number and your comment.
       * <p>Testcase numbers are calculated as integers incrementing from 1.  Will
       * also close any previously init'd but not closed testcase.</p>
       * @author Shane_Curcuru@lotus.com
       * @todo investigate tying this to the actual testCase method names,
       * instead of blindly incrementing the counter
       * @param comment short description of this test case's objective.
       */
      public void testCaseInit(String comment)
      {
  
          if (duringTestCase)
          {
  
              // Either user messed up (forgot to call testCaseClose) or something went wrong
              logErrorMsg("WARNING! testCaseInit when duringTestCase=true!");
  
              // Force call to testCaseClose()
              testCaseClose();
          }
  
          caseNum++;
  
          caseComment = comment;
          caseResult = DEFAULT_RESULT;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].testCaseInit(String.valueOf(caseNum) + " "
                                      + caseComment);
          }
  
          duringTestCase = true;
  
          // Note there is a slight delay while logPerfMsg calls all reporters
          long t = System.currentTimeMillis();
  
          logPerfMsg(CASE_START, t, caseComment);
      }
  
      /**
       * Call once to end each test case and sub-summate results.
       * @author Shane_Curcuru@lotus.com
       */
      public void testCaseClose()
      {
  
          long t = System.currentTimeMillis();
  
          logPerfMsg(CASE_STOP, t, caseComment);
  
          if (!duringTestCase)
          {
              logErrorMsg("WARNING! testCaseClose when duringTestCase=false!");
  
              // Force call to testCaseInit()
              // NEEDSWORK: should we really do this?  This ensures any results
              //            are well-formed, however a user might not expect this.
              testCaseInit("WARNING! testCaseClose when duringTestCase=false!");
          }
  
          duringTestCase = false;
          testResult = java.lang.Math.max(testResult, caseResult);
  
          // Increment our results counters 
          incrementResultCounter(CASES, caseResult);
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].testCaseClose(
                  String.valueOf(caseNum) + " " + caseComment,
                  resultToString(caseResult));
          }
      }
  
      /**
       * Implement Logger-only method.
       * <p>Here, a Reporter is simply acting as a logger: so don't
       * summate any results, do performance measuring, or anything
       * else, just pass the call through to our Loggers.
       * @param msg message or name of the test case to log out
       * @param result result of testfile
       */
      public void testCaseClose(String msg, String result)
      {
  
          if (!duringTestCase)
          {
              logErrorMsg("WARNING! testCaseClose when duringTestCase=false!");
  
              // Force call to testCaseInit()
              // NEEDSWORK: should we really do this?  This ensures any results
              //            are well-formed, however a user might not expect this.
              testCaseInit("WARNING! testCaseClose when duringTestCase=false!");
          }
  
          duringTestCase = false;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].testCaseClose(
                  String.valueOf(caseNum) + " " + caseComment,
                  resultToString(caseResult));
          }
      }
  
      /**
       * Calls back into a Test to run test cases in order.
       * <p>Use reflection to call back and execute each testCaseXX method
       * in the calling test in order, catching exceptions along the way.</p>
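       * <p>An illustrative sketch (assumes a Test subclass that defines
       * public methods <code>testCase1()</code> and <code>testCase2()</code>
       * and holds this Reporter as <code>reporter</code>):</p>
       * <pre>
       *   // from within the Test subclass:
       *   reporter.executeTests(this, 2, null);  // runs testCase1(), then testCase2()
       * </pre>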
       * @author Shane Curcuru
       * @param testObject the test object itself.
       * @param numTestCases number of consecutively numbered test cases to execute.
       * @param options (future use: options to pass to testcases)
       * @return status, true if OK, false if a big bad error occurred
       */
      public boolean executeTests(Test testObject, int numTestCases,
                                  Object options)
      {
  
          // Flag denoting if we've had any errors
          boolean gotException = false;
  
          // Declare all needed java variables
          String tmpErrString = "executeTests: no errors yet";
          Object noArgs[] = new Object[0];  // use options instead
          Class noParams[] = new Class[0];
          Method currTestCase;
          Class testClass;
  
          // Get class reference for the test applet itself
          testClass = testObject.getClass();
  
          logTraceMsg("executeTests: running " + numTestCases + " tests now.");
  
          for (int tcNum = 1; tcNum <= numTestCases; tcNum++)
          {
              try
              {  // get a reference to the next test case that we'll be calling
                  tmpErrString = "executeTests: No such method: testCase"
                                 + tcNum + "()";
                  currTestCase = testClass.getMethod("testCase" + tcNum,
                                                     noParams);
  
                  // Now directly invoke that test case
                  tmpErrString =
                      "executeTests: Method threw an exception: testCase"
                      + tcNum + "(): ";
  
                  logTraceMsg("executeTests: invoking testCase" + tcNum
                              + " now.");
                  currTestCase.invoke(testObject, noArgs);
              }
              catch (InvocationTargetException ite)
              {
  
                  // Catch any error, log it as a fail, and allow next test case to run
                  // @todo should we log it as an error instead?
                  gotException = true;
                  testResult = java.lang.Math.max(FAIL_RESULT, testResult);
  
                  // Grab the contained error 
                  java.lang.Throwable containedThrowable =
                      ite.getTargetException();
  
                  tmpErrString += ite.toString() + ":"
                                  + containedThrowable.toString();
  
                  logErrorMsg(tmpErrString);
                  logThrowable(ERRORMSG, ite, tmpErrString);
              }  // end of catch
              catch (Throwable t)
              {
  
                  // Catch any Exception or Error, log it as a fail, and allow next test case to run
                  gotException = true;
                  testResult = java.lang.Math.max(FAIL_RESULT, testResult);
                  tmpErrString += t.toString();
  
                  logErrorMsg(tmpErrString);
                  logThrowable(ERRORMSG, t, tmpErrString);
              }  // end of catch
          }  // end of for
  
          // Return true only if everything passed
          if (testResult == PASS_RESULT)
              return true;
          else
              return false;
      }  // end of executeTests
  
      //-----------------------------------------------------
      //-------- Test results logging routines --------
      //-----------------------------------------------------
  
      /**
       * Accessor for loggingLevel, determines what level of log*() calls get output.
       * @return loggingLevel, as an int.
       */
      public int getLoggingLevel()
      {
          return loggingLevel;
      }
  
      /**
       * Accessor for loggingLevel, determines what level of log*() calls get output.
       * @param setLL loggingLevel; normalized to be between CRITICALMSG and TRACEMSG.
       */
      public void setLoggingLevel(int setLL)
      {
  
          if (setLL < CRITICALMSG)
          {
              loggingLevel = CRITICALMSG;
          }
          else if (setLL > TRACEMSG)
          {
              loggingLevel = TRACEMSG;
          }
          else
          {
              loggingLevel = setLL;
          }
      }
  
      /**
       * Report a comment to result file with specified severity.
       * <p>Works in conjunction with {@link #loggingLevel };
       * only outputs messages that are at or more severe than (i.e.
       * numerically at or below) the current logging level.</p>
       * <p>Note that some Loggers may limit the comment string,
       * either in overall length or by stripping any linefeeds, etc.
       * This is to allow for optimization of file or database-type
       * reporters with fixed fields.  Users who need to log out
       * special string data should use logArbitrary() instead.</p>
       * <p>Remember, use {@link #check(String, String, String)
       * various check*() methods} to report the actual results
       * of your tests.</p>
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param msg comment to log out.
       * @see #loggingLevel
       */
      public void logMsg(int level, String msg)
      {
  
          if (level > loggingLevel)
              return;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].logMsg(level, msg);
          }
      }
  
      /**
       * Report an arbitrary String to result file with specified severity.
       * Log out the String provided exactly as-is.
       * @author Shane_Curcuru@lotus.com
       * @param level severity or class of message.
       * @param msg arbitrary String to log out.
       */
      public void logArbitrary(int level, String msg)
      {
  
          if (level > loggingLevel)
              return;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].logArbitrary(level, msg);
          }
      }
  
      /**
       * Logs out statistics to result file with specified severity.
       * <p>This is a general-purpose way to log out numeric statistics.  We accept
       * both a long and a double to allow users to save whatever kind of numbers
       * they need to, with the simplest API.  The actual meanings of the numbers
       * are dependent on the implementer.</p>
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param lVal statistic in long format.
       * @param dVal statistic in double format.
       * @param msg comment to log out.
       */
      public void logStatistic(int level, long lVal, double dVal, String msg)
      {
  
          if (level > loggingLevel)
              return;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].logStatistic(level, lVal, dVal, msg);
          }
      }
  
      /**
       * Logs out an element to results with specified severity.
       * This method is primarily for reporters that output to fixed
       * structures, like files, XML data, or databases.
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param element name of enclosing element
       * @param attrs hash of name=value attributes
       * @param msg Object to log out; it is up to each logger to handle
       * processing of this; usually just logs msg.toString().
       */
      public void logElement(int level, String element, Hashtable attrs,
                             Object msg)
      {
  
          if (level > loggingLevel)
              return;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].logElement(level, element, attrs, msg);
          }
      }
  
      /**
       * Logs out throwable.toString() and stack trace to result file with specified severity.
       * <p>Works in conjunction with setLoggingLevel(int); only outputs messages that
       * are at least as severe as the current logging level.</p>
       * <p>This uses logArbitrary to log out your message, newline, throwable.toString(), newline,
       * and then throwable.printStackTrace(). No corresponding Logger call exists.</p>
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param throwable thrown throwable/exception to log out.
       * @param msg description of the throwable.
       */
      public void logThrowable(int level, Throwable throwable, String msg)
      {
  
          if (level > loggingLevel)
              return;
  
          StringWriter sWriter = new StringWriter();
  
          sWriter.write(msg + "\n");
          sWriter.write(throwable.toString() + "\n");
  
          PrintWriter pWriter = new PrintWriter(sWriter);
  
          throwable.printStackTrace(pWriter);
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].logArbitrary(level, sWriter.toString());
          }
      }
  
      /**
       * Logs out contents of a Hashtable with specified severity.
       * <p>Works in conjunction with setLoggingLevel(int); only outputs messages that
       * are at least as severe as the current logging level.</p>
       * <p>Loggers should store or log the full contents of the hashtable.</p>
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param hash Hashtable to log the contents of.
       * @param msg description of the Hashtable.
       */
      public void logHashtable(int level, Hashtable hash, String msg)
      {
  
          if (level > loggingLevel)
              return;
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].logHashtable(level, hash, msg);
          }
      }
  
      /**
       * Logs out a critical comment to results; always printed out.
       * @author Shane_Curcuru@lotus.com
       * @param msg comment to log out.
       */
      public void logCriticalMsg(String msg)
      {
          logMsg(CRITICALMSG, msg);
      }
  
      // There is no logFailsOnlyMsg(String msg) method
  
      /**
       * Logs out an error comment to results.
       * <p>Note that subclassed libraries may choose to override to
       * cause a fail to happen along with printing out the message.</p>
       * @author Shane_Curcuru@lotus.com
       * @param msg comment to log out.
       */
      public void logErrorMsg(String msg)
      {
          logMsg(ERRORMSG, msg);
      }
  
      /**
       * Logs out a warning comment to results.
       * @author Shane_Curcuru@lotus.com
       * @param msg comment to log out.
       */
      public void logWarningMsg(String msg)
      {
          logMsg(WARNINGMSG, msg);
      }
  
      /**
       * Logs out a status comment to results.
       * @author Shane_Curcuru@lotus.com
       * @param msg comment to log out.
       */
      public void logStatusMsg(String msg)
      {
          logMsg(STATUSMSG, msg);
      }
  
      /**
       * Logs out an informational comment to results.
       * @author Shane_Curcuru@lotus.com
       * @param msg comment to log out.
       */
      public void logInfoMsg(String msg)
      {
          logMsg(INFOMSG, msg);
      }
  
      /**
       * Logs out a trace comment to results.
       * @author Shane_Curcuru@lotus.com
       * @param msg comment to log out.
       */
      public void logTraceMsg(String msg)
      {
          logMsg(TRACEMSG, msg);
      }
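
      /* Illustrative sketch (uses the level constants referenced elsewhere in
       * this class): a test can combine setLoggingLevel() with these wrappers
       * so that verbose output only appears when wanted, e.g.
       *
       *   reporter.setLoggingLevel(INFOMSG);  // suppresses TRACEMSG output
       *   reporter.logInfoMsg("About to process the stylesheet");
       *   reporter.logTraceMsg("Detail only logged when loggingLevel is TRACEMSG");
       *   reporter.logErrorMsg("Errors are more severe, so always logged");
       */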
  
      //-----------------------------------------------------
      //-------- Test results reporting check* routines --------
      //-----------------------------------------------------
      // There is no public void checkIncp(String comment) method
  
      /**
       * Writes out a Pass record with comment.
       * A Pass signifies that an individual test point has completed and has
       * been verified to have behaved correctly.
       * <p>If you need to do your own specific comparisons, you can
       * do them in your code and then just call checkPass or checkFail.</p>
       * <p>Derived classes must implement this to <B>both</B> report the
       * results out appropriately <B>and</B> to summate the results, if needed.</p>
       * <p>Pass results are a low priority, except for INCP (incomplete).  Note
       * that if a test never calls check*(), it will have an incomplete result.</p>
       * @author Shane_Curcuru@lotus.com
       * @param comment to log with the pass record.
       */
      public void checkPass(String comment)
      {
  
          // Increment our results counters 
          incrementResultCounter(CHECKS, PASS_RESULT);
  
          // Special: only report it actually if needed
          if (getLoggingLevel() > FAILSONLY)
          {
              for (int i = 0; i < numLoggers; i++)
              {
                  loggers[i].checkPass(comment);
              }
          }
  
          caseResult = java.lang.Math.max(PASS_RESULT, caseResult);
      }
  
      /**
       * Writes out an ambiguous record with comment.
       * <p>Ambiguous results are neither pass nor fail. Different test
       * libraries may have slightly different reasons for using ambg.</p>
       * <p>Derived classes must implement this to <B>both</B> report the
       * results out appropriately <B>and</B> to summate the results, if needed.</p>
       * <p>Ambg results have a middling priority, and take precedence over incomplete and pass.</p>
       * <p>An Ambiguous result may signify that the test point has completed and either
       * appears to have succeeded, or that it has produced a result but there is no known
       * 'gold' result to compare it to.</p>
       * @author Shane_Curcuru@lotus.com
       * @param comment to log with the ambg record.
       */
      public void checkAmbiguous(String comment)
      {
  
          // Increment our results counters 
          incrementResultCounter(CHECKS, AMBG_RESULT);
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].checkAmbiguous(comment);
          }
  
          caseResult = java.lang.Math.max(AMBG_RESULT, caseResult);
      }
  
      /**
       * Writes out a Fail record with comment.
       * <p>If you need to do your own specific comparisons, you can
       * do them in your code and then just call checkPass or checkFail.</p>
       * <p>Derived classes must implement this to <B>both</B> report the
       * results out appropriately <B>and</B> to summate the results, if needed.</p>
       * <p>Fail results have a high priority, and take precedence over incomplete, pass, and ambiguous.</p>
       * <p>A Fail signifies that an individual test point has completed and has
       * been verified to have behaved <B>in</B>correctly.</p>
       * @author Shane_Curcuru@lotus.com
       * @param comment to log with the fail record.
       */
      public void checkFail(String comment)
      {
  
          // Increment our results counters 
          incrementResultCounter(CHECKS, FAIL_RESULT);
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].checkFail(comment);
          }
  
          caseResult = java.lang.Math.max(FAIL_RESULT, caseResult);
      }
  
      /**
       * Writes out an Error record with comment.
       * <p>Derived classes must implement this to <B>both</B> report the
       * results out appropriately <B>and</B> to summate the results, if needed.</p>
       * <p>Error results have the highest priority, and take precedence over
       * all other results.</p>
       * <p>An Error signifies that something unusual has gone wrong with the execution
       * of the test at this point - likely something that will require a human to
       * debug to see what really happened.</p>
       * @author Shane_Curcuru@lotus.com
       * @param comment to log with the error record.
       */
      public void checkErr(String comment)
      {
  
          // Increment our results counters 
          incrementResultCounter(CHECKS, ERRR_RESULT);
  
          for (int i = 0; i < numLoggers; i++)
          {
              loggers[i].checkErr(comment);
          }
  
          caseResult = java.lang.Math.max(ERRR_RESULT, caseResult);
      }
  
      //-----------------------------------------------------
      //-------- Simplified Performance Logging - beyond interface Reporter --------
      //-----------------------------------------------------
  
      /** Default for perfLogging: performance records are not logged unless enabled. */
      protected final boolean DEFAULT_PERFLOGGING_LEVEL = false;
  
      /**
       * This determines if performance information is logged out to results.
       * <p>When true, extra performance records are written out to result files.</p>
       * @see #logPerfMsg(java.lang.String, long, java.lang.String)
       */
      protected boolean perfLogging = DEFAULT_PERFLOGGING_LEVEL;
  
      /**
       * Accessor for perfLogging, determines if we log performance info.
       * @todo add PerfLogging to Reporter interface
       * @return Whether or not we log performance info.
       */
      public boolean getPerfLogging()
      {
          return (perfLogging);
      }
  
      /**
       * Accessor for perfLogging, determines if we log performance info.
       * @param setPL whether or not we log performance info.
       */
      public void setPerfLogging(boolean setPL)
      {
          perfLogging = setPL;
      }
  
      /**
       * Constants used to mark performance records in output.
       */
  
      // Note: string representations are explicitly set to all be 
      //       4 characters long to make it simpler to parse results
      public static final String TEST_START = "TSrt";
  
      /** Marks the stop of a whole test in performance output. */
      public static final String TEST_STOP = "TStp";
  
      /** Marks the start of a test case in performance output. */
      public static final String CASE_START = "CSrt";
  
      /** Marks the stop of a test case in performance output. */
      public static final String CASE_STOP = "CStp";
  
      /** Marks a user timer record (from startTimer/stopTimer) in performance output. */
      public static final String USER_TIMER = "UTmr";
  
      /** Marks a user-supplied timestamp record in performance output. */
      public static final String USER_TIMESTAMP = "UTim";
  
      /** Marks a user-supplied memory statistic in performance output. */
      public static final String USER_MEMORY = "UMem";
  
      /** Separator between the record type and the message in performance output. */
      public static final String PERF_SEPARATOR = ";";
  
      /**
       * Logs out a performance statistic.
       * <p>Only logs times if perfLogging set to true.</p>
       * <p>As an optimization for record-based Loggers, this is a rather simplistic
       * way to log performance info - however it's sufficient for most purposes.</p>
       * @author Frank Bell
       * @param type type of performance statistic.
       * @param data long value of performance statistic.
       * @param msg comment to log out.
       */
      public void logPerfMsg(String type, long data, String msg)
      {
  
          if (getPerfLogging())
          {
              double dummy = 0;
  
              for (int i = 0; i < numLoggers; i++)
              {
  
                  // NEEDSWORK: simply put it at the current loggingLevel we have set
                  //            Is there a better way to mesh performance output with the rest?
                  loggers[i].logStatistic(loggingLevel, data, dummy,
                                          type + PERF_SEPARATOR + msg);
              }
          }
      }
  
      /**
       * Table of start times captured by startTimer(), keyed by the msg passed in.
       * @see #startTimer(java.lang.String)
       * @see #stopTimer(java.lang.String)
       */
      protected Hashtable perfTimers = new Hashtable();
  
      /**
       * Captures current time in milliseconds, only if perfLogging.
       * <p>Use with stopTimer(msg) to log out the elapsed time.</p>
       * @author Shane_Curcuru@lotus.com
       * @param msg key used to match a later stopTimer(msg) call; also logged out.
       */
      public void startTimer(String msg)
      {
  
          // Note optimization: only capture times if perfLogging
          if ((perfLogging) && (msg != null))
          {
              perfTimers.put(msg, new Long(System.currentTimeMillis()));
          }
      }
  
      /**
       * Captures current time in milliseconds and logs out difference.
       * Will only log times if perfLogging set to true.
       * <p>Only logs time if it finds a corresponding msg entry that was startTimer'd.</p>
       * @author Shane_Curcuru@lotus.com
       * @param msg comment to log out.
       */
      public void stopTimer(String msg)
      {
  
          // Capture time immediately to reduce latency
          long stopTime = System.currentTimeMillis();
  
          // Note optimization: only use times if perfLogging
          if ((perfLogging) && (msg != null))
          {
              Long startTime = (Long) perfTimers.get(msg);

              // Only log a time if there was a matching startTimer(msg) call
              if (startTime != null)
              {
                  logPerfMsg(USER_TIMER, (stopTime - startTime.longValue()), msg);
                  perfTimers.remove(msg);
              }
          }
      }
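
      /* Illustrative sketch: typical use of these simple perf timers, assuming
       * performance logging has been enabled on this Reporter:
       *
       *   reporter.setPerfLogging(true);
       *   reporter.startTimer("transform identity.xsl");
       *   // ... do the timed work here ...
       *   reporter.stopTimer("transform identity.xsl");  // logs a UTmr record with the elapsed ms
       */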
  
      /**
       * Accessor for currently running test case number, read-only.
       * @return current test case number.
       */
      public int getCurrentCaseNum()
      {
          return caseNum;
      }
  
      /**
       * Accessor for current test case's result, read-only.
       * @return current test case result.
       */
      public int getCurrentCaseResult()
      {
          return caseResult;
      }
  
      /**
       * Accessor for overall test file result, read-only.
       * @return test file's overall result.
       */
      public int getCurrentFileResult()
      {
          return testResult;
      }
  
      /**
       * Utility method to log out overall result counters.  
       *
       * @param count number of results of this kind; only logged if greater than zero.
       * @param desc description to log alongside the count.
       */
      protected void logResultsCounter(int count, String desc)
      {
  
          // Optimization: Only log the kinds of results we have
          if (count > 0)
              logStatistic(loggingLevel, count, 0, desc);
      }
  
      /** Utility method to log out overall result counters. */
      public void logResultsCounters()
      {
  
          // NEEDSWORK: what's the best format to display this stuff in?
          // NEEDSWORK: what loggingLevel should we use?
          // NEEDSWORK: temporarily skipping the 'files' since 
          //            we only have tests with one file being run
          // logResultsCounter(incpCount[FILES], "incpCount[FILES]");
          logResultsCounter(incpCount[CASES], "incpCount[CASES]");
          logResultsCounter(incpCount[CHECKS], "incpCount[CHECKS]");
  
          // logResultsCounter(passCount[FILES], "passCount[FILES]");
          logResultsCounter(passCount[CASES], "passCount[CASES]");
          logResultsCounter(passCount[CHECKS], "passCount[CHECKS]");
  
          // logResultsCounter(ambgCount[FILES], "ambgCount[FILES]");
          logResultsCounter(ambgCount[CASES], "ambgCount[CASES]");
          logResultsCounter(ambgCount[CHECKS], "ambgCount[CHECKS]");
  
          // logResultsCounter(failCount[FILES], "failCount[FILES]");
          logResultsCounter(failCount[CASES], "failCount[CASES]");
          logResultsCounter(failCount[CHECKS], "failCount[CHECKS]");
  
          // logResultsCounter(errrCount[FILES], "errrCount[FILES]");
          logResultsCounter(errrCount[CASES], "errrCount[CASES]");
          logResultsCounter(errrCount[CHECKS], "errrCount[CHECKS]");
      }
  
      //-----------------------------------------------------
      //-------- Test results reporting check* routines --------
      //-----------------------------------------------------
  
      /**
       * Compares actual and expected, and logs the result, pass/fail.
       * The comment you pass is added along with the pass/fail, of course.
       * Currently, you may pass a pair of any of these simple {type}s:
       * <ul>
       * <li>boolean</li>
       * <li>byte</li>
       * <li>short</li>
       * <li>int</li>
       * <li>long</li>
       * <li>float</li>
       * <li>double</li>
       * <li>String</li>
       * </ul>
       * <p>While tests could simply call checkPass(comment), providing these convenience
       * methods can save lines of code, since you can replace:</p>
       * <code>if (foo == bar) <BR>
       *           checkPass(comment); <BR>
       *       else <BR>
       *           checkFail(comment);</code>
       * <p>With the much simpler:</p>
       * <code>check(foo, bar, comment);</code>
       * <p>Plus, you can either use or ignore the boolean return value.</p>
       * <p>Note that individual methods checkInt(...), checkLong(...), etc. also exist.
       * These type-independent overloaded methods are provided as a convenience to
       * Java-only testwriters.  JavaScript scripts must call the
       * type-specific checkInt(...), checkString(...), etc. methods directly.</p>
       * <p>Note that testwriters are free to ignore the boolean return value.</p>
       * @author Shane_Curcuru@lotus.com
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       * @see #checkObject
       */
      public boolean check(boolean actual, boolean expected, String comment)
      {
          return (checkBool(actual, expected, comment));
      }
  
      /**
       * Compares actual and expected (byte), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkByte
       */
      public boolean check(byte actual, byte expected, String comment)
      {
          return (checkByte(actual, expected, comment));
      }
  
      /**
       * Compares actual and expected (short), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkShort
       */
      public boolean check(short actual, short expected, String comment)
      {
          return (checkShort(actual, expected, comment));
      }
  
      /**
       * Compares actual and expected (int), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkInt
       */
      public boolean check(int actual, int expected, String comment)
      {
          return (checkInt(actual, expected, comment));
      }
  
      /**
       * Compares actual and expected (long), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkLong
       */
      public boolean check(long actual, long expected, String comment)
      {
          return (checkLong(actual, expected, comment));
      }
  
      /**
       * Compares actual and expected (float), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkFloat
       */
      public boolean check(float actual, float expected, String comment)
      {
          return (checkFloat(actual, expected, comment));
      }
  
      /**
       * Compares actual and expected (double), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkDouble
       */
      public boolean check(double actual, double expected, String comment)
      {
          return (checkDouble(actual, expected, comment));
      }
  
      /**
       * Compares actual and expected (String), and logs the result, pass/fail.
       * @param actual String returned from your test code.
       * @param expected String that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkString
       */
      public boolean check(String actual, String expected, String comment)
      {
          return (checkString(actual, expected, comment));
      }
  
      // No check(Object, Object, String) currently provided, please call checkObject(...) directly
  
      /**
       * Compares actual and expected (Object), and logs the result, pass/fail.
       * <p><b>Special note for checkObject:</b></p>
       * <p>Since this takes an object reference and not a primitive type,
       * it works slightly differently than other check{Type} methods.</p>
       * <ul>
       * <li>If both are null, then Pass</li>
       * <li>Else if actual.equals(expected) then Pass</li>
       * <li>Else Fail</li>
       * </ul>
       * @author Shane_Curcuru@lotus.com
       * @param actual Object returned from your test code.
       * @param expected Object that test should return to pass.
       * @param comment to log out with result.
       * @see #checkPass
       * @see #checkFail
       * @see #check
       * @return status, true=pass, false otherwise
       */
      public boolean checkObject(Object actual, Object expected, String comment)
      {
  
          // Pass if both null, or both valid & equals
          if (actual != null)
          {
              if (actual.equals(expected))
              {
                  checkPass(comment);
  
                  return true;
              }
              else
              {
                  checkFail(comment);
  
                  return false;
              }
          }
          else
          {  // actual is null, so can't use .equals
              if (expected == null)
              {
                  checkPass(comment);
  
                  return true;
              }
              else
              {
                  checkFail(comment);
  
                  return false;
              }
          }
      }
  
      /**
       * Compares actual and expected (boolean), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       */
      public boolean checkBool(boolean actual, boolean expected, String comment)
      {
  
          if (actual == expected)
          {
              checkPass(comment);
  
              return true;
          }
          else
          {
              checkFail(comment);
  
              return false;
          }
      }
  
      /**
       * Compares actual and expected (byte), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       */
      public boolean checkByte(byte actual, byte expected, String comment)
      {
  
          if (actual == expected)
          {
              checkPass(comment);
  
              return true;
          }
          else
          {
              checkFail(comment);
  
              return false;
          }
      }
  
      /**
       * Compares actual and expected (short), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       */
      public boolean checkShort(short actual, short expected, String comment)
      {
  
          if (actual == expected)
          {
              checkPass(comment);
  
              return true;
          }
          else
          {
              checkFail(comment);
  
              return false;
          }
      }
  
      /**
       * Compares actual and expected (int), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       */
      public boolean checkInt(int actual, int expected, String comment)
      {
  
          if (actual == expected)
          {
              checkPass(comment);
  
              return true;
          }
          else
          {
              checkFail(comment);
  
              return false;
          }
      }
  
      /**
       * Compares actual and expected (long), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       */
      public boolean checkLong(long actual, long expected, String comment)
      {
  
          if (actual == expected)
          {
              checkPass(comment);
  
              return true;
          }
          else
          {
              checkFail(comment);
  
              return false;
          }
      }
  
      /**
       * Compares actual and expected (float), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       */
      public boolean checkFloat(float actual, float expected, String comment)
      {
  
          if (actual == expected)
          {
              checkPass(comment);
  
              return true;
          }
          else
          {
              checkFail(comment);
  
              return false;
          }
      }
  
      /**
       * Compares actual and expected (double), and logs the result, pass/fail.
       * @param actual value returned from your test code.
       * @param expected value that test should return to pass.
       * @param comment to log out with result.
       * @return status, true=pass, false otherwise
       * @see #checkPass
       * @see #checkFail
       */
      public boolean checkDouble(double actual, double expected, String comment)
      {
  
          if (actual == expected)
          {
              checkPass(comment);
  
              return true;
          }
          else
          {
              checkFail(comment);
  
              return false;
          }
      }
  
      /**
       * Compares actual and expected (String), and logs the result, pass/fail.
       * <p><b>Special note for checkString:</b></p>
       * <p>Since this takes a String object and not a primitive type,
       * it works slightly differently than other check{Type} methods.</p>
       * <ul>
       * <li>If both are null, then Pass</li>
       * <li>Else if actual.compareTo(expected) == 0 then Pass</li>
       * <li>Else Fail</li>
       * </ul>
       * @author Shane_Curcuru@lotus.com
       * @param actual String returned from your test code.
       * @param expected String that test should return to pass.
       * @param comment to log out with result.
       * @see #checkPass
       * @see #checkFail
       * @see #checkObject
       * @return status, true=pass, false otherwise
       */
      public boolean checkString(String actual, String expected, String comment)
      {
  
          // Pass if both null, or both valid & equals
          if (actual != null)
          {

              // Guard against a null expected value, which would otherwise
              // throw a NullPointerException from compareTo; treat it as a Fail
              // .compareTo returns 0 if the strings match lexicographically
              if ((expected != null) && (actual.compareTo(expected) == 0))
              {
                  checkPass(comment);

                  return true;
              }
              else
              {
                  checkFail(comment);

                  return false;
              }
          }
          else
          {  // actual is null, so can't use .equals
              if (expected == null)
              {
                  checkPass(comment);
  
                  return true;
              }
              else
              {
                  checkFail(comment);
  
                  return false;
              }
          }
      }
  
      /**
       * Uses an external CheckService to compare actual and expected,
       * and logs the result, pass/fail.
       * <p>CheckServices may be implemented to do custom equivalency
       * checking between complex object types. It is the responsibility
       * of the CheckService to call back into us to report results.</p>
       * @author Shane_Curcuru@lotus.com
       * @param service CheckService implementation to use
       * @param actual Object returned from your test code.
       * @param expected Object that test should return to pass.
       * @param comment to log out with result.
       * @see #checkPass
       * @see #checkFail
       * @see #check
       * @return status, true=pass, false otherwise
       */
      public boolean check(CheckService service, Object actual,
                           Object expected, String comment)
      {
  
          if (service == null)
          {
              checkErr("CheckService null for: " + comment);
  
              return false;
          }
  
          if (service.check(this, actual, expected, comment) == PASS_RESULT)
              return true;
          else
              return false;
      }
  
      /** Flag to control internal debugging of Reporter; sends extra info to System.out. */
      protected boolean debug = false;
  
      /**
       * Accessor for internal debugging flag.  
       *
       * @return true if internal debugging output is enabled.
       */
      public boolean getDebug()
      {
          return (debug);
      }
  
      /**
       * Accessor for internal debugging flag.  
       *
       * @param setDbg true to enable internal debugging output, false to disable.
       */
      public void setDebug(boolean setDbg)
      {
  
          debug = setDbg;
  
          debugPrintln("setDebug enabled");  // will only print if setDbg was true
      }
  
      /**
       * Basic debugging output wrapper for Reporter.  
       *
       * @param msg debug message to print (only when debug is enabled).
       */
      public void debugPrintln(String msg)
      {
  
          if (!debug)
              return;
  
          // If we have reporters, use them
          if (numLoggers > 0)
              logCriticalMsg("RI.dP: " + msg);
  
              // Otherwise, just dump to the console
          else
              System.out.println("RI.dP: " + msg);
      }
  
      /**
       * Utility method to increment result counters.  
       *
       * @param ctrOffset offset into the counter arrays (e.g. CASES or CHECKS).
       * @param r result constant (*_RESULT) selecting which counter to increment.
       */
      public void incrementResultCounter(int ctrOffset, int r)
      {
  
          switch (r)
          {
          case INCP_RESULT :
              incpCount[ctrOffset]++;
              break;
          case PASS_RESULT :
              passCount[ctrOffset]++;
              break;
          case AMBG_RESULT :
              ambgCount[ctrOffset]++;
              break;
          case FAIL_RESULT :
              failCount[ctrOffset]++;
              break;
          case ERRR_RESULT :
              errrCount[ctrOffset]++;
              break;
          default :
              ;  // NEEDSWORK: should we report this, or allow users to add their own counters?
          }
      }
  
      /**
       * Utility method to translate an int result to a string.  
       *
       * @param r result constant (*_RESULT) to translate.
       * @return matching string constant, or "Unkn" if not recognized.
       */
      public static String resultToString(int r)
      {
  
          switch (r)
          {
          case INCP_RESULT :
              return (INCP);
          case PASS_RESULT :
              return (PASS);
          case AMBG_RESULT :
              return (AMBG);
          case FAIL_RESULT :
              return (FAIL);
          case ERRR_RESULT :
              return (ERRR);
          default :
              return ("Unkn");  // NEEDSWORK: should have better constant for this
          }
      }
  }  // end of class Reporter
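
  A minimal usage sketch (not part of the checked-in file): a testcase method
  might use the check*() conveniences above roughly as follows, assuming
  'reporter' is an already-configured Reporter and the other names are
  hypothetical values from the code under test.

      // Each check both logs a pass/fail record and updates the result counters
      reporter.check(actualCount, 10, "node count matches");              // delegates to checkInt()
      reporter.check(rootName, "root", "document element name matches");  // delegates to checkString()
      if (!reporter.checkObject(actualNode, expectedNode, "same node returned"))
          reporter.logInfoMsg("mismatch details: " + actualNode);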
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/SimpleFileCheckService.java
  
  Index: SimpleFileCheckService.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * SimpleFileCheckService.java
   *
   */
  package org.apache.qetest;
  
  import java.io.BufferedReader;
  import java.io.FileReader;
  import java.io.File;
  
  /**
   * Simply does .readLine of each file into string buffers and then String.equals().
   * @author Shane_Curcuru@lotus.com
   * @version $Id: SimpleFileCheckService.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   */
  public class SimpleFileCheckService implements CheckService
  {
  
      /**
       * Compare two objects for equivalence, and return appropriate result.
    * Implementers should provide the details of their "equals"
    * algorithm in getDescription().
    * Note that the order of actual, reference is usually important
    * in determining the result.
    * <p>Typically:</p>
    * <ul>
    * <li>any unexpected Exceptions thrown -> ERRR_RESULT</li>
    * <li>either object is not a File -> ERRR_RESULT</li>
    * <li>actual does not exist -> FAIL_RESULT</li>
    * <li>reference does not exist -> AMBG_RESULT</li>
    * <li>actual is equivalent to reference -> PASS_RESULT</li>
    * <li>actual is not equivalent to reference -> FAIL_RESULT</li>
    * </ul>
       *
       * @param reporter to dump any output messages to
       * @param actual (current) File to check
       * @param reference (gold, or expected) File to check against
    * @param msg description of what you're checking
       * @return Reporter.*_RESULT code denoting status; each method may define
    * its own meanings for pass, fail, ambiguous, etc.
       */
      public int check(Reporter reporter, Object actual, Object reference,
                       String msg)
      {
  
          if (!((actual instanceof File) && (reference instanceof File)))
          {
  
              // Must have File objects to continue
              reporter.checkErr(
                  "SimpleFileCheckService only takes files, with: " + msg);
  
              return Reporter.ERRR_RESULT;
          }
  
          String fVal1 = readFileIntoString((File) actual);
  
          // Fail if Actual file doesn't exist
          if (fVal1 == null)
          {
              reporter.checkFail(msg);
  
              return Reporter.FAIL_RESULT;
          }
  
          String fVal2 = readFileIntoString((File) reference);
  
          // Ambiguous if gold or reference file doesn't exist
          if (fVal2 == null)
          {
              reporter.checkAmbiguous(msg);
  
              return Reporter.AMBG_RESULT;
          }
  
          // Pass if they're equal, fail otherwise        
          if (fVal1.equals(fVal2))
          {
              reporter.checkPass(msg);
  
              return Reporter.PASS_RESULT;
          }
          else
          {
              reporter.checkFail(msg);
  
              return Reporter.FAIL_RESULT;
          }
      }
  
      /**
       * Compare two files for equivalence, and return appropriate *_RESULT flag.
       * @param file1 Actual (current) file to check
       * @param file2 Reference (gold, or expected) file to check against
       * @return PASS if equal, FAIL if not, AMBG if gold does not exist
       * <P>Uses appropriate values from Reporter.</P>
       */
      public int checkFiles(File file1, File file2)
      {
  
          String fVal1 = readFileIntoString(file1);
  
          // Fail if Actual file doesn't exist
          if (fVal1 == null)
              return (Reporter.FAIL_RESULT);
  
          String fVal2 = readFileIntoString(file2);
  
          // Ambiguous if gold or reference file doesn't exist
          if (fVal2 == null)
              return (Reporter.AMBG_RESULT);
  
          // Pass if they're equal, fail otherwise        
          if (fVal1.equals(fVal2))
              return (Reporter.PASS_RESULT);
          else
              return (Reporter.FAIL_RESULT);
      }
  
      /**
     * Read a text file into a single String, line-by-line (newlines dropped).
     * @param f File to read in.
     * @return contents of the file as a String, or null if any exception occurred.
       */
      private String readFileIntoString(File f)
      {
  
          StringBuffer sb = new StringBuffer();
  
          try
          {
              FileReader fr = new FileReader(f);
              BufferedReader br = new BufferedReader(fr);
  
              for (;;)
              {
                  String inbuf = br.readLine();
  
                  if (inbuf == null)
                      break;
  
                  sb.append(inbuf);
              }
          }
          catch (Exception e)
          {
              System.err.println("SimpleFileCheckService(" + f.getPath()
                                 + ") exception :" + e.toString());
  
              return (null);
          }
  
          return sb.toString();
      }
  
      /**
     * Description of algorithm used to check file equivalence.
     * @return short description of how files are compared.
       */
      public String getDescription()
      {
          return ("Reads in text files line-by-line as strings (ignoring newlines) and does String.equals()");
      }
  
      /**
       * Gets extended information about the last checkFiles call: NONE AVAILABLE.
       * @return null, since we don't support this
       */
      public String getExtendedInfo()
      {
          return null;
      }
  }  // end of class SimpleFileCheckService
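
  A minimal usage sketch (not part of the checked-in file): a file-based test
  might plug this service into Reporter.check(CheckService, ...) roughly as
  follows; the Reporter instance and the file names are only illustrative.

      CheckService fileChecker = new SimpleFileCheckService();
      File actualFile = new File("results/out.xml");  // produced by the code under test
      File goldFile = new File("gold/out.xml");       // known-good reference
      reporter.check(fileChecker, actualFile, goldFile, "transform output matches gold");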
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/Test.java
  
  Index: Test.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * Test.java
   *
   */
  package org.apache.qetest;
  
  import java.util.Properties;
  
  /**
   * Minimal interface defining a test.
   * Supplying a separate interface from the most common default
   * implementation makes it simpler for external harnesses or
   * automation methods to handle lists of Tests.
   * @author Shane_Curcuru@lotus.com
   * @version $Id: Test.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   */
  public interface Test
  {
  
      /**
       * Accessor method for the name of this test.
       * @return name of this test.
       */
      public abstract String getTestName();
  
      /**
       * Accessor method for a brief description of this test.
       * @return description of what this test does.
       */
      public abstract String getTestDescription();
  
      /**
       * Accessor methods for our Reporter.
       * Tests will either have a Logger (for very simple tests)
       * or a Reporter (for most tests).
       * <p>Providing both API's in the interface allows us to run
       * the two styles of tests nearly interchangeably.</p>
       * @todo document this better; how do harnesses know which to use?
       * @param r the Reporter to have this test use for logging results
       */
      public abstract void setReporter(Reporter r);
  
      /**
       * Accessor methods for our Reporter.
       * @return the Reporter this test uses for logging results.
       */
      public abstract Reporter getReporter();
  
      /**
       * Accessor methods for our Logger.
       * Tests will either have a Logger (for very simple tests)
       * or a Reporter (for most tests).
       * <p>Providing both API's in the interface allows us to run
       * the two styles of tests nearly interchangeably.</p>
       * @todo document this better; how do harnesses know which to use?
       * @param l the Logger to have this test use for logging results
       */
      public abstract void setLogger(Logger l);
  
      /**
       * Accessor methods for our Logger.
       * @return the Logger this test uses for logging results.
       */
      public abstract Logger getLogger();
  
      /**
       * Accessor methods for our abort flag.
       * If this flag is set during a test run, then we should simply
       * not bother to run the rest of the test.  In all other cases,
       * harnesses or Tests should attempt to continue running the
       * entire test including cleanup routines.
       * @param a true if we should halt processing this test
       */
      public abstract void setAbortTest(boolean a);
  
      /**
       * Accessor methods for our abort flag.
       * @return true if we should halt processing this test.
       */
      public abstract boolean getAbortTest();
  
      /**
       * Token used to pass command line as initializer.
       * Commonly tests may be run as applications - this token is
       * used as the name for the entry in the Properties block
       * that will contain the array of Strings that was the command
       * line for the application.
       * <p>This allows external test harnesses or specific test
       * implementations to easily pass in their command line using
       * the Properties argument in many Test methods.</p>
       */
      public static final String MAIN_CMDLINE = "test.CmdLine";
  
      /**
       * Run this test: main interface to cause the test to run itself.
       * A major goal of the Test class is to separate the act and
       * process of writing a test from its actual runtime
       * implementation.  Testwriters should not generally need to
       * know how their test is being executed.
       * <p>They should simply focus on defining:</p>
       * <ul>
       * <li>doTestFileInit: what setup has to be done before running
       * the testCases: initializing the product under test, etc.</li>
       * <li>testCase1, 2, ... n: individual, independent test cases</li>
       * <li>doTestFileClose: what cleanup has to be done after running
       * the test, like restoring product state or freeing test resources</li>
       * </ul>
       * <p>This method returns a simple boolean status as a convenience.
       * In cases where you have a harness that runs a great many
       * tests that normally pass, the harness can simply check this
       * value for each test: if it's true, you could even delete any
       * result logs then, and simply print out a meta-log stating
       * that the test passed.  Note that this does not provide any
       * information about why a test failed (or caused an error, or
       * whatever) - that's what the info in any reports/logs are for.</p>
       * <p>If a test is aborted, then any containing harness need not
       * finish executing the test.  Otherwise, even if part of a test fails,
       * you should let the whole test run through.  Note that aborting
       * a test may result in the reporter or logger output being
       * incomplete, which may make an invalid report file (in the case
       * of XMLFileLogger, for example).</p>
       * @todo Maybe return TestResult instead of boolean flag?
       * @todo pass in a set of options for the test
       * @author Shane_Curcuru@lotus.com
       * @param p Properties block used for initialization
       * @return status - true if test ran to completion and <b>all</b>
       * cases passed, false otherwise
       */
      public abstract boolean runTest(Properties p);
  
      /**
       * Initialize this test - called once before running testcases.
       * @todo does this need to be in the interface? Shouldn't external
       * callers simply use the runTest() interface?
       * @author Shane_Curcuru@lotus.com
       * @param p Properties block used for initialization
       * @return true if setup and Reporter creation successful, false otherwise
       */
      public abstract boolean testFileInit(Properties p);
  
      /**
       * Run all of our testcases.
       * This should cause each testCase in the test to be executed
       * independently, and then return true if and only if all
       * testCases passed successfully.  If any testCase failed or
       * caused any unexpected errors, exceptions, etc., it should
       * return false.
       * @todo Maybe return TestResult instead of boolean flag?
       * @todo does this need to be in the interface? Shouldn't external
       * callers simply use the runTest() interface?
       * @author Shane_Curcuru@lotus.com
       * @param p Properties block used for initialization
       * @return true if all testCases passed, false otherwise
       */
      public abstract boolean runTestCases(Properties p);
  
      /**
       * Cleanup this test - called once after running testcases.
       * @todo does this need to be in the interface? Shouldn't external
       * callers simply use the runTest() interface?
       * @author Shane_Curcuru@lotus.com
       * @param p Properties block used for initialization
       * @return true if cleanup successful, false otherwise
       */
      public abstract boolean testFileClose(Properties p);
  }  // end of class Test
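
  A minimal usage sketch (not part of the checked-in file): an external harness
  might drive any Test implementation through this interface roughly as below;
  SampleHarness and SomeConcreteTest are hypothetical names.

      import java.util.Properties;
      import org.apache.qetest.Test;

      public class SampleHarness
      {
          public static void main(String[] args)
          {
              Properties props = new Properties();

              // Pass the raw command line through, per the MAIN_CMDLINE contract
              props.put(Test.MAIN_CMDLINE, args);

              Test test = new SomeConcreteTest();  // any concrete Test implementation
              boolean allPassed = test.runTest(props);

              System.out.println(test.getTestName()
                                 + (allPassed ? " passed" : " did not pass all cases"));
          }
      }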
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/TestfileInfo.java
  
  Index: TestfileInfo.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * TestfileInfo.java
   *
   */
  package org.apache.qetest;
  
  import java.util.Properties;
  import java.util.StringTokenizer;
  
  /**
   * Simple data-holding class specifying info about one 'testfile'.
   * <p>This is purely a convenience class for tests that rely on
   * external datafiles. Tests will very commonly need input data,
   * will output operations to files, and compare those outputs to
   * known good or 'gold' files. A generic description and author
   * field are also provided.  A freeform String field of options
   * is included for easy extensibility.</p>
   * <ul>
   * <li>inputName</li>
   * <li>outputName</li>
   * <li>goldName</li>
   * <li>description</li>
   * <li>author</li>
   * <li>options</li>
   * </ul>
   * <p>Note that String representations are used, since this allows
   * for testing of how applications translate the names to File
   * objects, or whatever they use.</p>
   * @author Shane Curcuru
   * @version $Id: TestfileInfo.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   * @todo Leave everything public for now for simplicity
   * Later, if this is a useful construct, we should improve on its services:
   * allow it to verify its own files, change absolute refs to relative, etc.
   */
  public class TestfileInfo
  {
  
      /** Name of the input data file. */
      public String inputName = null;
  
      /** Parameter name token for inputName, e.g. when loading from a Properties block. */
      public static final String INPUTNAME = "inputName";
  
      /** Name of the output file to be created. */
      public String outputName = null;
  
      /** Property key for outputName when initializing from a Properties block. */
      public static final String OUTPUTNAME = "outputName";
  
      /** Name of the gold file to compare output to. */
      public String goldName = null;
  
      /** Property key for goldName when initializing from a Properties block. */
      public static final String GOLDNAME = "goldName";
  
      /** Author or copyright info for the testfile. */
      public String author = null;
  
      /** Property key for author when initializing from a Properties block. */
      public static final String AUTHOR = "author";
  
      /** Basic description of the testfile. */
      public String description = null;
  
      /** Property key for description when initializing from a Properties block. */
      public static final String DESCRIPTION = "description";
  
      /** Any additional options (for future expansion). */
      public String options = null;
  
      /** Property key for options when initializing from a Properties block. */
      public static final String OPTIONS = "options";
  
      /** No-arg constructor leaves everything null. */
      public TestfileInfo(){}
  
      /**
       * Initialize members from name=value pairs in Properties block.
       * Default value for each field is null.
       * @param p Properties block to initialize from
       */
      public TestfileInfo(Properties p)
      {
          load(p);
      }
  
      /**
       * Pass in a StringTokenizer-default-delimited string to initialize members.
       * <p>Members are read in order: inputName outputName goldName
       * author description options...
       * default value for each field is null</p>
       * @param inputStr String to initialize from
       */
      public TestfileInfo(String inputStr)
      {
  
          StringTokenizer st = new StringTokenizer(inputStr);
  
          load(st, null);
      }
  
      /**
       * Pass in a StringTokenizer-default-delimited string to initialize members.
       * <p>Members are read in order: inputName outputName goldName
       * author description options...
       * default value for each field is user-specified String</p>
       * @param inputStr String to initialize from
       * @param defaultVal String to use as default for any un-specified value
       */
      public TestfileInfo(String inputStr, String defaultVal)
      {
  
          StringTokenizer st = new StringTokenizer(inputStr);
  
          load(st, defaultVal);
      }
  
      /**
       * Pass in a specified-delimited string to initialize members.
       * <p>Members are read in order: inputName outputName goldName
       * author description options...
       * default value for each field is user-specified String</p>
       * @param inputStr String to initialize from
       * @param defaultVal String to use as default for any un-specified value
       * @param separator String to use as separator for StringTokenizer
       */
      public TestfileInfo(String inputStr, String defaultVal, String separator)
      {
  
          StringTokenizer st = new StringTokenizer(inputStr, separator);
  
          load(st, defaultVal);
      }
  
      /**
       * Worker method to initialize members.
       *
       * @param st StringTokenizer positioned at the first member value to read
       * @param defaultVal String to use as default for any un-specified value
       */
      public void load(StringTokenizer st, String defaultVal)
      {
  
          // Fill in as many items as are available; default the value otherwise
          // Note that order is important!
          if (st.hasMoreTokens())
              inputName = st.nextToken();
          else
              inputName = defaultVal;
  
          if (st.hasMoreTokens())
              outputName = st.nextToken();
          else
              outputName = defaultVal;
  
          if (st.hasMoreTokens())
              goldName = st.nextToken();
          else
              goldName = defaultVal;
  
          if (st.hasMoreTokens())
              author = st.nextToken();
          else
              author = defaultVal;
  
          if (st.hasMoreTokens())
              description = st.nextToken();
          else
              description = defaultVal;
  
          if (st.hasMoreTokens())
          {
              options = st.nextToken();
  
              // For now, simply glom all additional tokens into the options, until the end of string
              // Leave separated with a single space char for readability
              while (st.hasMoreTokens())
              {
                  options += " " + st.nextToken();
              }
          }
          else
              options = defaultVal;
      }
  
      /**
       * Initialize members from name=value pairs in Properties block.
       * Default value for each field is null.
       * @param p Properties block to initialize from
       */
      public void load(Properties p)
      {
  
          inputName = p.getProperty(INPUTNAME);
          outputName = p.getProperty(OUTPUTNAME);
          goldName = p.getProperty(GOLDNAME);
          author = p.getProperty(AUTHOR);
          description = p.getProperty(DESCRIPTION);
          options = p.getProperty(OPTIONS);
      }
  
      /**
       * Cheap-o debugging: return tab-delimited String of all our values.
       *
       * @return tab-delimited String of all member values
       */
      public String dump()
      {
          return (inputName + '\t' + outputName + '\t' + goldName + '\t'
                  + author + '\t' + description + '\t' + options);
      }
  }
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/TestImpl.java
  
  Index: TestImpl.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * TestImpl.java
   *
   */
  package org.apache.qetest;
  
  import java.util.Properties;
  
  /**
   * Minimal class defining a test implementation, using a Reporter.
   * <p>TestImpls generally interact with a Reporter, which reports
   * out in various formats the results from this test.
   * Most test classes should subclass from this test, as it adds
   * structure that helps to define the conceptual logic of running
   * a 'test'.  It also provides useful default implementations.</p>
   * <p>Users wishing a much simpler testing framework can simply
   * implement the minimal methods in the Test interface, and use a
   * Logger to report results instead of a Reporter.</p>
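   * <p>A minimal subclass sketch; the class name, comments, and messages
   * here are illustrative only:</p>
   * <pre>
   * public class MySmokeTest extends TestImpl
   * {
   *     public MySmokeTest()
   *     {
   *         testName = "MySmokeTest";
   *         testComment = "Quick sanity check of my product";
   *     }
   *     public boolean doTestFileInit(Properties p)
   *     {
   *         // set up the product under test here
   *         return true;
   *     }
   *     public boolean runTestCases(Properties p)
   *     {
   *         reporter.logTraceMsg("running testCase1");
   *         // perform checks via the reporter here
   *         return true;
   *     }
   *     public boolean doTestFileClose(Properties p)
   *     {
   *         // clean up anything doTestFileInit created
   *         return true;
   *     }
   * }
   * </pre>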
   * @author Shane_Curcuru@lotus.com
   * @version $Id: TestImpl.java,v 1.1 2000/11/01 23:30:51 curcuru Exp $
   */
  public class TestImpl implements Test
  {
  
      /**
       * Name (and description) of the current test.
       * <p>Note that these are merely convenience variables - you do not need
       * to use them.  If you do use them, they should be initialized at
       * construction time.</p>
       */
      protected String testName = null;
  
      /**
       * Accessor method for the name of this test.
       *
       * @return testName, the name of this test
       */
      public String getTestName()
      {
          return testName;
      }
  
      /** (Name and) description of the current test. */
      protected String testComment = null;
  
      /**
       * Accessor method for a brief description of this test.
       *
       * @return testComment, a brief description of this test
       */
      public String getTestDescription()
      {
          return testComment;
      }
  
      /**
       * Default constructor - initialize testName, Comment.
       */
      public TestImpl()
      {
  
          // Only set them if they're not set
          if (testName == null)
              testName = "TestImpl.defaultName";
  
          if (testComment == null)
              testComment = "TestImpl.defaultComment";
      }
  
      /** Our Logger, who we tell all our secrets to. */
      protected Logger logger = null;
  
      /**
       * Accessor methods for our Logger.
       *
       * @param l Logger to set; ignored, since this implementation always uses a Reporter
       */
      public void setLogger(Logger l)
      {  // no-op: our implementation always uses a Reporter
      }
  
      /**
       * Accessor methods for our Logger.
       *
       * @return null, since this implementation always uses a Reporter
       */
      public Logger getLogger()
      {
          return null;
      }
  
      /** Our Reporter, who we tell all our secrets to. */
      protected Reporter reporter;
  
      /**
       * Accessor methods for our Reporter.
       *
       * @param r Reporter to set; ignored if null
       */
      public void setReporter(Reporter r)
      {
          if (r != null)
              reporter = r;
      }
  
      /**
       * Accessor methods for our Reporter.
       *
       * @return our Reporter
       */
      public Reporter getReporter()
      {
          return reporter;
      }
  
      /** Flag to indicate a serious enough error that we should just give up. */
      protected boolean abortTest = false;
  
      /**
       * Accessor methods for our abort flag.
       *
       * @param a value to set the abort flag to
       */
      public void setAbortTest(boolean a)
      {
          abortTest = a;
      }
  
      /**
       * Accessor methods for our abort flag.
       *
       * @return true if the test should be aborted, false otherwise
       */
      public boolean getAbortTest()
      {
          return (abortTest);
      }
  
      /**
       * Run this test: main interface to cause the test to run itself.
       * <p>A major goal of the TestImpl class is to separate the act and process
       * of writing a test from its actual runtime implementation.  Testwriters
       * should not need to know how their test is being executed.</p>
       * <ul>They should simply focus on defining:
       * <li>doTestFileInit: what setup has to be done before running the test</li>
       * <li>testCase1, 2, ... n: individual, independent test cases</li>
       * <li>doTestFileClose: what cleanup has to be done after running the test</li>
       * </ul>
       * <p>This method returns a simple boolean status as a convenience.  In cases
       * where you have a harness that runs a great many tests that normally pass, the
       * harness can simply check this value for each test: if it's true, you could
       * even delete any result logs then, and simply print out a meta-log stating
       * that the test passed.  Note that this does not provide any information about
       * why a test failed (or caused an error, or whatever) - that's what the info in
       * any Reporter's logs is for.</p>
       * <p>If a test is aborted, then any containing harness need not
       * finish executing the test.  Otherwise, even if part of a test fails,
       * you should let the whole test run through.</p>
       * <p>Harnesses should generally simply call runTest() to ask the
       * test to run itself.  In some cases a Harness might want to control
       * the process more closely, in which case it should call:
       * <code>
       *  test.setReporter(); // optional, depending on the test
       *  test.testFileInit();
       *  test.runTestCases();
       *  test.testFileClose();
       * </code>  instead.
       * @todo return TestResult instead of boolean flag
       * @author Shane_Curcuru@lotus.com
       *
       * NEEDSDOC @param p
       * @return status - true if test ran to completion and <b>all</b>
       * cases passed, false otherwise
       */
      public boolean runTest(Properties p)
      {
  
          boolean status = testFileInit(p);
  
          if (getAbortTest())
              return status;
  
          status &= runTestCases(p);
  
          if (getAbortTest())
              return status;
  
          status &= testFileClose(p);
  
          return status;
      }
  
      /**
       * Initialize this test - called once before running testcases.
       * Predefined behavior - subclasses should <b>not</b> override this method.
       * <p>This method is basically a composite that masks the most common
       * implementation: creating a reporter or logger first, then initializing
       * any data or product settings the test needs setup first. It does this
       * by separating this method into three methods:
       * <code>
       *   preTestFileInit(); // Create/initialize Reporter
       *   doTestFileInit();  // User-defined: initialize product under test
       *   postTestFileInit() // Report out we've completed initialization
       * </code>
       * </p>
       * @author Shane_Curcuru@lotus.com
       * @see #preTestFileInit(java.util.Properties)
       * @see #doTestFileInit(java.util.Properties)
       * @see #postTestFileInit(java.util.Properties)
       *
       * NEEDSDOC @param p
       *
       * NEEDSDOC ($objectName$) @return
       */
      public boolean testFileInit(Properties p)
      {
  
          // Note: we don't want to use shortcut operators here,
          //       since we want each method to get called
          // Pass the Property block to each method, so that 
          //       subclasses can do initialization whenever 
          //       is best for their design
          return preTestFileInit(p) & doTestFileInit(p) & postTestFileInit(p);
      }
  
      /**
       * Initialize this test - called once before running testcases.
       * <p>Create and initialize a Reporter here.</p>
       * <p>This implementation simply creates a default Reporter
       * and adds a ConsoleLogger. Most test groups will want to override
       * this method to create custom Reporters or Loggers.</p>
       * @author Shane_Curcuru@lotus.com
       * @see #testFileInit(java.util.Properties)
       *
       * NEEDSDOC @param p
       *
       * NEEDSDOC ($objectName$) @return
       */
      public boolean preTestFileInit(Properties p)
      {
  
          // Pass our properties block directly to the reporter
          //  so it can use the same values in initialization
          setReporter(new Reporter(p));
          reporter.addDefaultLogger();
          reporter.testFileInit(testName, testComment);
  
          return true;
      }
  
      /**
       * Initialize this test - called once before running testcases.
       * <p>Subclasses <b>must</b> override this to do whatever specific
       * processing they need to initialize their product under test.</p>
       * <p>If for any reason the test should not continue, it <b>must</b>
       * return false from this method.</p>
       * @author Shane_Curcuru@lotus.com
       * @see #testFileInit(java.util.Properties)
       *
       * NEEDSDOC @param p
       *
       * NEEDSDOC ($objectName$) @return
       */
      public boolean doTestFileInit(Properties p)
      {
  
          // @todo implement in your subclass
          reporter.logTraceMsg(
              "TestImpl.doTestFileInit() default implementation - please override");
  
          return true;
      }
  
      /**
       * Initialize this test - called once before running testcases.
       * <p>Simply log out that our initialization has completed,
       * so that structured-style logs will make it clear where startup
       * code ends and testCase code begins.</p>
       * @author Shane_Curcuru@lotus.com
       * @see #testFileInit(java.util.Properties)
       *
       * NEEDSDOC @param p
       *
       * NEEDSDOC ($objectName$) @return
       */
      public boolean postTestFileInit(Properties p)
      {
  
          reporter.logTraceMsg(
              "TestImpl.postTestFileInit() initialization complete");
  
          return true;
      }
  
      /**
       * Run all of our testcases.
       * Subclasses must override this method.  It should cause each testCase
       * in the test to be executed independently, and then return true if and
       * only if all testCases passed successfully.  If any testCase failed or
       * caused any unexpected errors, exceptions, etc., it should return false.
       * @author Shane_Curcuru@lotus.com
       *
       * NEEDSDOC @param p
       * @return true if all testCases passed, false otherwise
       */
      public boolean runTestCases(Properties p)
      {
  
          // @todo implement in your subclass
          reporter.logTraceMsg(
              "TestImpl.runTestCases() default implementation - please override");
  
          return true;
      }
  
      /**
       * Cleanup this test - called once after running testcases.
       * @author Shane_Curcuru@lotus.com
       *
       * NEEDSDOC @param p
       * @return true if cleanup successful, false otherwise
       */
      public boolean testFileClose(Properties p)
      {
  
          // Note: we don't want to use shortcut operators here,
          //       since we want each method to get called
          return preTestFileClose(p) & doTestFileClose(p)
                 & postTestFileClose(p);
      }
  
      /**
       * Log a trace message - called once after running testcases.
       * <p>Predefined behavior - subclasses should <B>not</B> override this method.</p>
       * @todo currently is primarily here to mark that we're closing
       * the test, in case doTestFileClose() blows up somehow.  May not be needed.
       * @author Shane_Curcuru@lotus.com
       * @see #testFileClose()
       *
       * NEEDSDOC @param p
       *
       * NEEDSDOC ($objectName$) @return
       */
      protected boolean preTestFileClose(Properties p)
      {
  
          // Have the reporter log a trace that the test is about to cleanup
          reporter.logTraceMsg("TestImpl.preTestFileClose()");
  
          return true;
      }
  
      /**
       * Cleanup this test - called once after running testcases.
       * <p>Subclasses <b>must</b> override this to do whatever specific
       * processing they need to cleanup after all testcases are run.</p>
       * @author Shane_Curcuru@lotus.com
       *
       * NEEDSDOC @param p
       *
       * NEEDSDOC ($objectName$) @return
       */
      public boolean doTestFileClose(Properties p)
      {
  
          // @todo implement in your subclass
          reporter.logTraceMsg(
              "TestImpl.doTestFileClose() default implementation - please override");
  
          return true;
      }
  
      /**
       * Mark the test complete - called once after running testcases.
       * <p>Predefined behavior - subclasses should <b>not</b> override
       * this method. Currently just tells our reporter to log the
       * testFileClose. This will calculate final results, and complete
       * logging for any structured output logs (like XML files).</p>
       * @author Shane_Curcuru@lotus.com
       * @see #testFileClose()
       *
       * NEEDSDOC @param p
       *
       * NEEDSDOC ($objectName$) @return
       */
      protected boolean postTestFileClose(Properties p)
      {
  
          // Have the reporter log out our completion
          reporter.testFileClose();
  
          return true;
      }
  
      /**
       * Main method to run test from the command line.
       * Test subclasses <B>must</B> override, obviously.
       * @author Shane Curcuru
       *
       * NEEDSDOC @param args
       */
      public static void main(String[] args)
      {
  
          TestImpl app = new TestImpl();
          Properties p = new Properties();
  
          p.put(MAIN_CMDLINE, args);
          app.runTest(p);
      }
  }  // end of class TestImpl
  
  
  
  
  1.1                  xml-xalan/test/java/src/org/apache/qetest/XMLFileLogger.java
  
  Index: XMLFileLogger.java
  ===================================================================
  /*
   * The Apache Software License, Version 1.1
   *
   *
   * Copyright (c) 2000 The Apache Software Foundation.  All rights 
   * reserved.
   *
   * Redistribution and use in source and binary forms, with or without
   * modification, are permitted provided that the following conditions
   * are met:
   *
   * 1. Redistributions of source code must retain the above copyright
   *    notice, this list of conditions and the following disclaimer. 
   *
   * 2. Redistributions in binary form must reproduce the above copyright
   *    notice, this list of conditions and the following disclaimer in
   *    the documentation and/or other materials provided with the
   *    distribution.
   *
   * 3. The end-user documentation included with the redistribution,
   *    if any, must include the following acknowledgment:  
   *       "This product includes software developed by the
   *        Apache Software Foundation (http://www.apache.org/)."
   *    Alternately, this acknowledgment may appear in the software itself,
   *    if and wherever such third-party acknowledgments normally appear.
   *
   * 4. The names "Xalan" and "Apache Software Foundation" must
   *    not be used to endorse or promote products derived from this
   *    software without prior written permission. For written 
   *    permission, please contact apache@apache.org.
   *
   * 5. Products derived from this software may not be called "Apache",
   *    nor may "Apache" appear in their name, without prior written
   *    permission of the Apache Software Foundation.
   *
   * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
   * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
   * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
   * DISCLAIMED.  IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
   * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
   * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
   * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
   * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
   * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   * SUCH DAMAGE.
   * ====================================================================
   *
   * This software consists of voluntary contributions made by many
   * individuals on behalf of the Apache Software Foundation and was
   * originally based on software copyright (c) 2000, Lotus
   * Development Corporation., http://www.lotus.com.  For more
   * information on the Apache Software Foundation, please see
   * <http://www.apache.org/>.
   */
  
  /*
   *
   * XMLFileLogger.java
   *
   */
  package org.apache.qetest;
  
  import java.io.File;
  import java.io.FileWriter;
  import java.io.IOException;
  import java.io.PrintWriter;
  
  import java.util.Date;
  import java.util.Enumeration;
  import java.util.Hashtable;
  import java.util.Properties;
  
  /**
   * Logger that saves output to a simple XML-format file.
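   * <p>A minimal usage sketch, assuming OPT_LOGFILE (inherited from Logger)
   * is the option naming the output file, as used by initialize(); the file
   * name and result string shown are illustrative:</p>
   * <pre>
   * Properties p = new Properties();
   * p.put(Logger.OPT_LOGFILE, "results.xml");
   * XMLFileLogger logger = new XMLFileLogger(p);  // calls initialize(p)
   * logger.testFileInit("MyTest", "sample run");
   * logger.logMsg(10, "hello from the test");
   * logger.testFileClose("MyTest", "Pass");
   * logger.close();  // writes the closing resultsfile tag
   * </pre>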
   * @todo improve escapeString so it's more rigorous about escaping
   * @author Shane_Curcuru@lotus.com
   * @version $Id: XMLFileLogger.java,v 1.1 2000/11/01 23:30:52 curcuru Exp $
   */
  public class XMLFileLogger implements Logger
  {
  
      //-----------------------------------------------------
      //-------- Constants for results file structure --------
      //-----------------------------------------------------
  
      /** XML tagnames for results file structure. */
      public static final String ELEM_RESULTSFILE = "resultsfile";
  
      /** NEEDSDOC Field ELEM_TESTFILE          */
      public static final String ELEM_TESTFILE = "testfile";
  
      /** NEEDSDOC Field ELEM_FILERESULT          */
      public static final String ELEM_FILERESULT = "fileresult";
  
      /** NEEDSDOC Field ELEM_TESTCASE          */
      public static final String ELEM_TESTCASE = "testcase";
  
      /** NEEDSDOC Field ELEM_CASERESULT          */
      public static final String ELEM_CASERESULT = "caseresult";
  
      /** NEEDSDOC Field ELEM_CHECKRESULT          */
      public static final String ELEM_CHECKRESULT = "checkresult";
  
      /** NEEDSDOC Field ELEM_STATISTIC          */
      public static final String ELEM_STATISTIC = "statistic";
  
      /** NEEDSDOC Field ELEM_LONGVAL          */
      public static final String ELEM_LONGVAL = "longval";
  
      /** NEEDSDOC Field ELEM_DOUBLEVAL          */
      public static final String ELEM_DOUBLEVAL = "doubleval";
  
      /** NEEDSDOC Field ELEM_MESSAGE          */
      public static final String ELEM_MESSAGE = "message";
  
      /** NEEDSDOC Field ELEM_ARBITRARY          */
      public static final String ELEM_ARBITRARY = "arbitrary";
  
      /** NEEDSDOC Field ELEM_HASHTABLE          */
      public static final String ELEM_HASHTABLE = "hashtable";
  
      /** NEEDSDOC Field ELEM_HASHITEM          */
      public static final String ELEM_HASHITEM = "hashitem";
  
      /** XML attribute names for results file structure. */
      public static final String ATTR_LEVEL = "level";
  
      /** NEEDSDOC Field ATTR_DESC          */
      public static final String ATTR_DESC = "desc";
  
      /** NEEDSDOC Field ATTR_TIME          */
      public static final String ATTR_TIME = "time";
  
      /** NEEDSDOC Field ATTR_RESULT          */
      public static final String ATTR_RESULT = "result";
  
      /** NEEDSDOC Field ATTR_KEY          */
      public static final String ATTR_KEY = "key";
  
      /** NEEDSDOC Field ATTR_FILENAME          */
      public static final String ATTR_FILENAME = OPT_LOGFILE;
  
      //-----------------------------------------------------
      //-------- Class members and accessors --------
      //-----------------------------------------------------
  
      /** If we're ready to start outputting yet. */
      protected boolean ready = false;
  
      /** If an error has occurred in this Logger. */
      protected boolean error = false;
  
      /** If we should flush after every testCaseClose. */
      protected boolean flushOnCaseClose = true;
  
      /**
       * Accessor for flushing; is set from properties.  
       *
       * NEEDSDOC ($objectName$) @return
       */
      public boolean getFlushOnCaseClose()
      {
          return (flushOnCaseClose);
      }
  
      /**
       * Accessor for flushing; is set from properties.  
       *
       * NEEDSDOC @param b
       */
      public void setFlushOnCaseClose(boolean b)
      {
          flushOnCaseClose = b;
      }
  
      /** If we have output anything yet. */
      protected boolean anyOutput = false;
  
      /** Name of the file we're outputting to. */
      protected String fileName = null;
  
      /** File reference to our output file. */
      protected File reportFile;

      /** FileWriter over our output file. */
      protected FileWriter reportWriter;

      /** PrintWriter wrapped around reportWriter; where output is actually written. */
      protected PrintWriter reportPrinter;
  
      /** Generic properties for this logger; sort-of replaces instance variables. */
      protected Properties loggerProps = null;
  
      //-----------------------------------------------------
      //-------- Control and utility routines --------
      //-----------------------------------------------------
  
      /** Simple constructor, does not perform initialization. */
      public XMLFileLogger()
      { /* no-op */
      }
  
      /**
       * Constructor calls initialize(p).
       * @param p Properties block to initialize us with.
       */
      public XMLFileLogger(Properties p)
      {
          ready = initialize(p);
      }
  
      /**
       * Return a description of what this Logger does.
       * @return "reports results in XML to specified fileName".
       */
      public String getDescription()
      {
          return ("org.apache.qetest.XMLFileLogger - reports results in XML to specified fileName.");
      }
  
      /**
       * Returns information about the Property name=value pairs
       * that are understood by this Logger: fileName=filename.
       * @return same as {@link java.applet.Applet#getParameterInfo()}.
       */
      public String[][] getParameterInfo()
      {
  
          String pinfo[][] =
          {
              { OPT_LOGFILE, "String",
                "Name of file to use for output; required" }
          };
  
          return pinfo;
      }
  
      /**
       * Accessor methods for our properties block.  
       *
       * NEEDSDOC ($objectName$) @return
       */
      public Properties getProperties()
      {
          return loggerProps;
      }
  
      /**
       * Accessor methods for our properties block.
       * @param p Properties to set (is cloned).
       */
      public void setProperties(Properties p)
      {
  
          if (p != null)
          {
              loggerProps = (Properties) p.clone();
          }
      }
  
      /**
       * Initialize this XMLFileLogger.
       * Must be called before attempting to log anything.
       * Opens a FileWriter for our output, and writes the opening record.
       * Record format:
       * <pre>&lt;resultsfile fileName="<i>name of result file</i>"&gt;</pre>
       *
       * @param p Properties block to initialize from; must include a
       * non-blank value for OPT_LOGFILE
       *
       * @return true if we initialized and are ready to log, false otherwise
       */
      public boolean initialize(Properties p)
      {
  
          setProperties(p);
  
          fileName = loggerProps.getProperty(OPT_LOGFILE, fileName);
  
          if ((fileName == null) || fileName.equals(""))
          {
  
              // We don't have a valid file, so bail
              error = true;
              ready = false;
  
              System.err.println("XMLFileLogger.initialize() ERROR: "
                                 + OPT_LOGFILE + " is blank");
  
              return false;
          }
  
          // Create a file and ensure it has a place to live
          reportFile = new File(fileName);
  
          // Note: bare filenames may not have parents, so catch and ignore exceptions
          try
          {
              File parent = new File(reportFile.getParent());
  
              if ((!parent.mkdirs()) && (!parent.exists()))
              {
  
                  // Couldn't create or find the directory for the file to live in, so bail
                  error = true;
                  ready = false;
  
                  System.err.println(
                      "XMLFileLogger.initialize() WARNING: cannot create directories: "
                      + fileName);
  
                  // Don't return yet: see if the reportWriter can still create the file later
                  // return(false);
              }
          }
          catch (Exception e)
          {
  
              // No-op: ignore if the parent's not there; trust that the file will get created later
          }
  
          try
          {
              reportWriter = new FileWriter(reportFile);
          }
          catch (IOException e)
          {
              System.err.println("XMLFileLogger.initialize() EXCEPTION: "
                                 + e.toString());
              e.printStackTrace();
  
              error = true;
              ready = false;
  
              return false;
          }
  
          reportPrinter = new PrintWriter(reportWriter);
          ready = true;
  
          return startResultsFile();
      }
  
      /**
       * Is this Logger ready to log results?
       * @return status - true if it's ready to report, false otherwise
       */
      public boolean isReady()
      {
  
          // Ensure our underlying logger, if one, is still OK
          if ((reportPrinter != null) && reportPrinter.checkError())
          {
  
              // NEEDSWORK: should we set ready = false in this case?
              //            errors in the PrintStream are not necessarily fatal
              error = true;
              ready = false;
          }
  
          return ready;
      }
  
      /**
       * Is this Logger still running OK?
       * @return status - true if an error has occurred, false if it's still working fine
       */
      public boolean checkError()
      {
  
          // Ensure our underlying logger, if one, is still OK
          if ((reportPrinter != null) && reportPrinter.checkError())
          {
  
              // NEEDSWORK: should we set ready = false in this case?
              //            errors in the PrintStream are not necessarily fatal
              error = true;
          }
  
          return error;
      }
  
      /** Flush this logger - ensure our File is flushed. */
      public void flush()
      {
  
          if (isReady())
          {
              reportPrinter.flush();
          }
      }
  
      /**
       * Close this logger - ensure our File, etc. are closed.
       * Record format:
       * <pre>&lt;/resultsfile&gt;</pre>
       */
      public void close()
      {
  
          flush();
  
          if (isReady())
          {
              closeResultsFile();
              reportPrinter.close();
          }
  
          ready = false;
      }
  
      /**
       * Worker method to write the XML header and open the resultsfile element.
       *
       * @return true if written, false if we're not ready
       */
      protected boolean startResultsFile()
      {
  
          if (isReady())
          {
  
              // Write out XML header and root test result element
              reportPrinter.println("<?xml version=\"1.0\"?>");
  
              // Note: this tag is closed in our .close() method, which the caller had better call!
              reportPrinter.println("<" + ELEM_RESULTSFILE + " "
                                    + ATTR_FILENAME + "=\"" + fileName + "\">");
  
              return true;
          }
          else
              return false;
      }
  
      /**
       * Worker method to close the resultsfile element.
       *
       * @return true if written, false if we're not ready
       */
      protected boolean closeResultsFile()
      {
  
          if (isReady())
          {
              reportPrinter.println("</" + ELEM_RESULTSFILE + ">");
  
              return true;
          }
          else
              return false;
      }
  
      //-----------------------------------------------------
      //-------- Testfile / Testcase start and stop routines --------
      //-----------------------------------------------------
  
      /**
       * Report that a testfile has started.
       * Begins a testfile element.  Record format:
       * <pre>&lt;testfile desc="<i>test description</i>" time="<i>timestamp</i>"&gt;</pre>
       * @param name file name or tag specifying the test.
       * @param comment comment about the test.
       */
      public void testFileInit(String name, String comment)
      {
  
          if (isReady())
          {
              reportPrinter.println("<" + ELEM_TESTFILE + " " + ATTR_DESC
                                    + "=\""
                                    + escapeString(name + ":" + comment)
                                    + "\" " + ATTR_TIME + "=\""
                                    + (new Date()).toString() + "\">");
          }
      }
  
      /**
       * Report that a testfile has finished, and report its result; flushes output.
       * Ends a testfile element.  Record format:
       * <pre>&lt;fileresult desc="<i>test description</i>" result="<i>pass/fail status</i>" time="<i>timestamp</i>"&gt;
       * &lt;/testfile&gt;</pre>
       * @param msg message to log out
       * @param result result of testfile
       */
      public void testFileClose(String msg, String result)
      {
  
          if (isReady())
          {
              reportPrinter.println("<" + ELEM_FILERESULT + " " + ATTR_DESC
                                    + "=\"" + escapeString(msg) + "\" "
                                    + ATTR_RESULT + "=\"" + result + "\" "
                                    + ATTR_TIME + "=\""
                                    + (new Date()).toString() + "\"/>");
              reportPrinter.println("</" + ELEM_TESTFILE + ">");
          }
  
          flush();
      }
  
      /** Optimization: for heavy use methods, form pre-defined constants to save on string concatenation. */
      private static final String TESTCASEINIT_HDR = "<" + ELEM_TESTCASE + " "
                                                         + ATTR_DESC + "=\"";
  
      /**
       * Report that a testcase has begun.
       * Begins a testcase element.  Record format:
       * <pre>&lt;testcase desc="<i>case description</i>"&gt;</pre>
       * @param comment description of the test case to log out
       */
      public void testCaseInit(String comment)
      {
  
          if (isReady())
          {
              reportPrinter.println(TESTCASEINIT_HDR + escapeString(comment)
                                    + "\">");
          }
      }
  
      /** NEEDSDOC Field TESTCASECLOSE_HDR          */
      private static final String TESTCASECLOSE_HDR = "<" + ELEM_CASERESULT
                                                          + " " + ATTR_DESC
                                                          + "=\"";
  
      /**
       * Report that a testcase has finished, and report its result.
       * Optionally flushes output. Ends a testcase element.   Record format:
       * <pre>&lt;caseresult desc="<i>case description</i>" result="<i>pass/fail status</i>"&gt;
       * &lt;/testcase&gt;</pre>
       * @param msg message or name of the testcase to log out
       * @param result result of the testcase
       */
      public void testCaseClose(String msg, String result)
      {
  
          if (isReady())
          {
              reportPrinter.println(TESTCASECLOSE_HDR + escapeString(msg)
                                    + "\" " + ATTR_RESULT + "=\"" + result
                                    + "\"/>");
              reportPrinter.println("</" + ELEM_TESTCASE + ">");
          }
  
          if (getFlushOnCaseClose())
              flush();
      }
  
      //-----------------------------------------------------
      //-------- Test results logging routines --------
      //-----------------------------------------------------
  
      /** NEEDSDOC Field MESSAGE_HDR          */
      private static final String MESSAGE_HDR = "<" + ELEM_MESSAGE + " "
                                                + ATTR_LEVEL + "=\"";
  
      /**
       * Report a comment to result file with specified severity.
       * Record format: <pre>&lt;message level="##"&gt;msg&lt;/message&gt;</pre>
       * @param level severity or class of message.
       * @param msg comment to log out.
       */
      public void logMsg(int level, String msg)
      {
  
          if (isReady())
          {
              reportPrinter.print(MESSAGE_HDR + level + "\">");
              reportPrinter.print(escapeString(msg));
              reportPrinter.println("</" + ELEM_MESSAGE + ">");
          }
      }
  
      /** NEEDSDOC Field ARBITRARY_HDR          */
      private static final String ARBITRARY_HDR = "<" + ELEM_ARBITRARY + " "
                                                  + ATTR_LEVEL + "=\"";
  
      /**
       * Report an arbitrary String to result file with specified severity.
       * Prepends and appends newline characters to the message to
       * separate it from the enclosing tags.
       * Record format: <pre>&lt;arbitrary level="##"&gt;&lt;![CDATA[
       * msg
       * ]]&gt;&lt;/arbitrary&gt;</pre>
       *
       * Note that arbitrary messages are always wrapped in CDATA
       * sections so that non-well-formed content cannot break the results file.  This needs
       * to be investigated for other elements as well (i.e. we should set a
       * standard for what Logger calls must be well-formed or not).
       * @param level severity or class of message.
       * @param msg arbitrary String to log out.
       * @todo investigate <b>not</b> fully escaping this string, since
       * it does get wrappered in CDATA
       */
      public void logArbitrary(int level, String msg)
      {
  
          if (isReady())
          {
              reportPrinter.println(ARBITRARY_HDR + level + "\"><![CDATA[");
              reportPrinter.println(escapeString(msg));
              reportPrinter.println("]]></" + ELEM_ARBITRARY + ">");
          }
      }
  
      /** NEEDSDOC Field STATISTIC_HDR          */
      private static final String STATISTIC_HDR = "<" + ELEM_STATISTIC + " "
                                                  + ATTR_LEVEL + "=\"";
  
      /**
       * Logs out statistics to result file with specified severity.
       * Record format: <pre>&lt;statistic level="##" desc="msg"&gt;&lt;longval&gt;1234&lt;/longval&gt;&lt;doubleval&gt;1.234&lt;/doubleval&gt;&lt;/statistic&gt;</pre>
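       * <p>A usage sketch, called on an XMLFileLogger instance named
       * <code>logger</code> (the values shown are illustrative):</p>
       * <pre>
       * long start = System.currentTimeMillis();
       * // ... perform the operation being measured ...
       * long millis = System.currentTimeMillis() - start;
       * logger.logStatistic(10, millis, millis / 1000.0, "transform time");
       * </pre>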
       * @param level severity of message.
       * @param lVal statistic in long format.
       * @param dVal statistic in double format.
       * @param msg comment to log out.
       */
      public void logStatistic(int level, long lVal, double dVal, String msg)
      {
  
          if (isReady())
          {
              reportPrinter.print(STATISTIC_HDR + level + "\" " + ATTR_DESC
                                  + "=\"" + escapeString(msg) + "\">");
              reportPrinter.print("<" + ELEM_LONGVAL + ">" + lVal + "</"
                                  + ELEM_LONGVAL + ">");
              reportPrinter.print("<" + ELEM_DOUBLEVAL + ">" + dVal + "</"
                                  + ELEM_DOUBLEVAL + ">");
              reportPrinter.println("</" + ELEM_STATISTIC + ">");
          }
      }
  
      /**
       * Logs out an element to results with specified severity.
       * Uses user-supplied element name and attribute list.  Currently
       * attribute values and msg are forced .toString().  Also,
       * 'level' is forced to be the first attribute of the element.
       * Record format:
       * <pre>&lt;<i>element_text</i> level="##"
       * attribute1="value1"
       * attribute2="value2"
       * attribute<i>n</i>="value<i>n</i>"&gt;
       * msg
       * &lt;/<i>element_text</i>&gt;</pre>
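       * <p>A usage sketch, called on an XMLFileLogger instance named
       * <code>logger</code> (the element and attribute names are illustrative):</p>
       * <pre>
       * Hashtable attrs = new Hashtable();
       * attrs.put("iterations", new Integer(10));
       * attrs.put("stylesheet", "identity.xsl");
       * logger.logElement(10, "perfdata", attrs, "timing run complete");
       * </pre>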
       * @author Shane_Curcuru@lotus.com
       * @param level severity of message.
       * @param element name of enclosing element
       * @param attrs hash of name=value attributes; note that the
       * caller must ensure they're legal XML
       * @param msg Object to log out .toString(); caller should
       * ensure it's legal XML (no CDATA is supplied)
       */
      public void logElement(int level, String element, Hashtable attrs,
                             Object msg)
      {
  
          if (isReady())
          {
              reportPrinter.println("<" + element + " " + ATTR_LEVEL + "=\""
                                    + level + "\"");
  
              for (Enumeration enum = attrs.keys();
                      enum.hasMoreElements(); /* no increment portion */ )
              {
                  Object key = enum.nextElement();
  
                  reportPrinter.println(key.toString() + "=\""
                                        + attrs.get(key).toString() + "\"");
              }
  
              reportPrinter.print(">");
              reportPrinter.println(msg.toString());
              reportPrinter.print("></" + element + ">");
          }
      }
  
      /** NEEDSDOC Field HASHTABLE_HDR          */
      private static final String HASHTABLE_HDR = "<" + ELEM_HASHTABLE + " "
                                                  + ATTR_LEVEL + "=\"";
  
      // Note the HASHITEM_HDR indent; must be updated if we ever switch to another indenting method.
  
      /** NEEDSDOC Field HASHITEM_HDR          */
      private static final String HASHITEM_HDR = "  <" + ELEM_HASHITEM + " "
                                                 + ATTR_KEY + "=\"";
  
      /**
       * Logs out contents of a Hashtable with specified severity.
       * Indents each hashitem within the table.
       * Record format: <pre>&lt;hashtable level="##" desc="msg"/&gt;
       * &nbsp;&nbsp;&lt;hashitem key="key1"&gt;value1&lt;/hashitem&gt;
       * &nbsp;&nbsp;&lt;hashitem key="key2"&gt;value2&lt;/hashitem&gt;
       * &lt;/hashtable&gt;</pre>
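       * <p>A usage sketch, called on an XMLFileLogger instance named
       * <code>logger</code> (the keys are illustrative):</p>
       * <pre>
       * Hashtable env = new Hashtable();
       * env.put("java.version", System.getProperty("java.version"));
       * env.put("user.dir", System.getProperty("user.dir"));
       * logger.logHashtable(10, env, "environment info");
       * </pre>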
       *
       * @param level severity or class of message.
       * @param hash Hashtable to log the contents of.
       * @param msg description of the Hashtable.
       */
      public void logHashtable(int level, Hashtable hash, String msg)
      {
  
          if (isReady())
          {
              reportPrinter.println(HASHTABLE_HDR + level + "\" " + ATTR_DESC
                                    + "=\"" + msg + "\">");
  
              if (hash == null)
              {
                  reportPrinter.print("<" + ELEM_HASHITEM + " " + ATTR_KEY
                                      + "=\"null\">");
                  reportPrinter.println("</" + ELEM_HASHITEM + ">");
              }
  
              try
              {
                  for (Enumeration enum = hash.keys();
                          enum.hasMoreElements(); /* no increment portion */ )
                  {
                      Object key = enum.nextElement();
  
                      // Ensure we'll have clean output by pre-fetching value before outputting anything
                      String value = hash.get(key).toString();
  
                      reportPrinter.print(HASHITEM_HDR + key.toString()
                                          + "\">");
                      reportPrinter.print(value);
                      reportPrinter.println("</" + ELEM_HASHITEM + ">");
                  }
              }
              catch (Exception e)
              {
  
                  // No-op: should ensure we have clean output
              }
  
              reportPrinter.println("</" + ELEM_HASHTABLE + ">");
          }
      }
  
      //-----------------------------------------------------
      //-------- Test results reporting check* routines --------
      //-----------------------------------------------------
  
      /** NEEDSDOC Field CHECKPASS_HDR          */
      private static final String CHECKPASS_HDR = "<" + ELEM_CHECKRESULT + " "
                                                  + ATTR_RESULT + "=\""
                                                  + Reporter.PASS + "\" "
                                                  + ATTR_DESC + "=\"";
  
      /**
       * Writes out a Pass record with comment.
       * Record format: <pre>&lt;checkresult result="PASS" desc="comment"/&gt;</pre>
       * @param comment comment to log with the pass record.
       */
      public void checkPass(String comment)
      {
  
          if (isReady())
          {
              reportPrinter.println(CHECKPASS_HDR + escapeString(comment)
                                    + "\"/>");
          }
      }
  
      /** NEEDSDOC Field CHECKAMBG_HDR          */
      private static final String CHECKAMBG_HDR = "<" + ELEM_CHECKRESULT + " "
                                                  + ATTR_RESULT + "=\""
                                                  + Reporter.AMBG + "\" "
                                                  + ATTR_DESC + "=\"";
  
      /**
       * Writes out an ambiguous record with comment.
       * Record format: <pre>&lt;checkresult result="AMBG" desc="comment"/&gt;</pre>
       * @param comment comment to log with the ambg record.
       */
      public void checkAmbiguous(String comment)
      {
  
          if (isReady())
          {
              reportPrinter.println(CHECKAMBG_HDR + escapeString(comment)
                                    + "\"/>");
          }
      }
  
      /** NEEDSDOC Field CHECKFAIL_HDR          */
      private static final String CHECKFAIL_HDR = "<" + ELEM_CHECKRESULT + " "
                                                  + ATTR_RESULT + "=\""
                                                  + Reporter.FAIL + "\" "
                                                  + ATTR_DESC + "=\"";
  
      /**
       * Writes out a Fail record with comment.
       * Record format: <pre>&lt;checkresult result="FAIL" desc="comment"/&gt;</pre>
       * @param comment comment to log with the fail record.
       */
      public void checkFail(String comment)
      {
  
          if (isReady())
          {
              reportPrinter.println(CHECKFAIL_HDR + escapeString(comment)
                                    + "\"/>");
          }
      }
  
      /** NEEDSDOC Field CHECKERRR_HDR          */
      private static final String CHECKERRR_HDR = "<" + ELEM_CHECKRESULT + " "
                                                  + ATTR_RESULT + "=\""
                                                  + Reporter.ERRR + "\" "
                                                  + ATTR_DESC + "=\"";
  
      /**
       * Writes out an Error record with comment.
       * Record format: <pre>&lt;checkresult result="ERRR" desc="comment"/&gt;</pre>
       * @param comment comment to log with the error record.
       */
      public void checkErr(String comment)
      {
  
          if (isReady())
          {
              reportPrinter.println(CHECKERRR_HDR + escapeString(comment)
                                    + "\"/>");
          }
      }
  
      //-----------------------------------------------------
      //-------- Worker routines for XML string escaping --------
      //-----------------------------------------------------
  
      /**
       * Lifted from org.apache.xml.serialize.transition.XMLSerializer
       *
       * NEEDSDOC @param ch
       *
       * NEEDSDOC ($objectName$) @return
       */
      protected String getEntityRef(char ch)
      {
  
          // Encode special XML characters into the equivalent character references.
          // These five are defined by default for all XML documents.
          switch (ch)
          {
          case '<' :
              return "lt";
          case '>' :
              return "gt";
          case '"' :
              return "quot";
          case '\'' :
              return "apos";
          case '&' :
              return "amp";
          }
  
          return null;
      }
  
      /**
       * Identifies the last printable character in the Unicode range
       * that is supported by the encoding used with this serializer.
       * For 8-bit encodings this will be either 0x7E or 0xFF.
       * For 16-bit encodings this will be 0xFFFF. Characters that are
       * not printable will be escaped using character references.
       * Lifted from org.apache.xml.serialize.transition.BaseMarkupSerializer
       */
      private int _lastPrintable = 0x7E;
  
      /**
       * Lifted from org.apache.xml.serialize.transition.BaseMarkupSerializer
       *
       * NEEDSDOC @param ch
       *
       * NEEDSDOC ($objectName$) @return
       */
      protected String printEscaped(char ch)
      {
  
          String charRef;
  
          // If there is a suitable entity reference for this
          // character, print it. The list of available entity
          // references is almost but not identical between
          // XML and HTML.
          charRef = getEntityRef(ch);
  
          if (charRef != null)
          {
  
              //_printer.printText( '&' );        // SC note we need to return a String for 
              //_printer.printText( charRef );    //    someone else to serialize
              //_printer.printText( ';' );
              return "&" + charRef + ";";
          }
          else if ((ch >= ' ' && ch <= _lastPrintable && ch != 0xF7)
                   || ch == '\n' || ch == '\r' || ch == '\t')
          {
  
              // The character is printable (or is a tab, newline, or carriage
              // return), so return it as-is.  Non-printables - below ASCII space
              // other than tab and line terminators, ASCII delete, or above the
              // printable threshold - are handled by the character-reference
              // branch below.
              //_printer.printText( ch );
              return String.valueOf(ch);
          }
          else
          {
  
              //_printer.printText( "&#" );
              //_printer.printText( Integer.toString( ch ) );
              //_printer.printText( ';' );
              return "&#" + Integer.toString(ch) + ";";
          }
      }
  
      /**
       * Escapes a string so it may be printed as text content or attribute
       * value. Non printable characters are escaped using character references.
       * Where the format specifies a default entity reference, that reference
       * is used (e.g. <tt>&amp;lt;</tt>).
       * Lifted from org.apache.xml.serialize.transition.BaseMarkupSerializer
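       * <p>For example, escaping the '&lt;' and '&amp;' characters in a
       * short string:</p>
       * <pre>
       * String desc = escapeString("a &lt; b &amp; c");
       * // desc is now the string: a &amp;lt; b &amp;amp; c
       * </pre>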
       *
       * @param source The string to escape
       * @return String after escaping - needed for our application
       */
      protected String escapeString(String source)  // protected void printEscaped( String source )
      {
  
          StringBuffer sb = new StringBuffer();
          final int n = source.length();
  
          for (int i = 0; i < n; ++i)
          {
  
              //char c = source.charAt( i );
              sb.append(printEscaped(source.charAt(i)));
          }
  
          return sb.toString();
      }
  }  // end of class XMLFileLogger