<scope>system</scope>
<systemPath>${project.basedir}/../lib/commons/commons.jar</systemPath>
</dependency>
+ <dependency>
+ <groupId>org.aspectj</groupId>
+ <artifactId>testing</artifactId>
+ <version>${project.version}</version>
+ <type>test-jar</type>
+ </dependency>
</dependencies>
</project>
+++ /dev/null
-/* *******************************************************************
- * Copyright (c) 2002 Palo Alto Research Center, Incorporated (PARC),
- * 2003 Contributors.
- * All rights reserved.
- * This program and the accompanying materials are made available
- * under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Xerox/PARC initial implementation
- * Wes Isberg 2003 changes.
- * ******************************************************************/
-
-package org.aspectj.testing.drivers;
-
-import org.aspectj.bridge.IMessage;
-import org.aspectj.bridge.IMessageHolder;
-import org.aspectj.bridge.MessageHandler;
-import org.aspectj.bridge.MessageUtil;
-import org.aspectj.testing.harness.bridge.AbstractRunSpec;
-import org.aspectj.testing.harness.bridge.AjcTest;
-import org.aspectj.testing.harness.bridge.CompilerRun;
-import org.aspectj.testing.harness.bridge.FlatSuiteReader;
-import org.aspectj.testing.harness.bridge.IncCompilerRun;
-import org.aspectj.testing.harness.bridge.JavaRun;
-import org.aspectj.testing.harness.bridge.RunSpecIterator;
-import org.aspectj.testing.harness.bridge.Sandbox;
-import org.aspectj.testing.harness.bridge.Validator;
-import org.aspectj.testing.run.IRun;
-import org.aspectj.testing.run.IRunIterator;
-import org.aspectj.testing.run.IRunListener;
-import org.aspectj.testing.run.IRunStatus;
-import org.aspectj.testing.run.IRunValidator;
-import org.aspectj.testing.run.RunListener;
-import org.aspectj.testing.run.RunStatus;
-import org.aspectj.testing.run.RunValidator;
-import org.aspectj.testing.run.Runner;
-import org.aspectj.testing.util.BridgeUtil;
-import org.aspectj.testing.util.RunUtils;
-import org.aspectj.testing.util.StreamsHandler;
-import org.aspectj.testing.util.StreamsHandler.Result;
-import org.aspectj.testing.xml.AjcSpecXmlReader;
-import org.aspectj.testing.xml.XMLWriter;
-import org.aspectj.util.FileUtil;
-import org.aspectj.util.LangUtil;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.io.PrintWriter;
-import java.text.DecimalFormat;
-import java.text.SimpleDateFormat;
-import java.util.*;
-
-/**
- * Test harness for running AjcTest.Suite test suites.
- * This can be easily extended by subclassing.
- * <ul>
- * <li>template algorithms for reading arguments, printing syntax,
- * reading suites, and reporting results all
- * delegate to methods that subclasses can override to support
- * additional arguments or different reporting.</li>
- * <li>implement arbitrary features as IRunListeners</li>
- * <li>support single-option aliases to any number of single-options </li>
- * </ul>
- * See {@link #report(IRunStatus, int, int, long)} for an explanation of test result
- * categories.
- */
-public class Harness {
- /**
- * Spaces up to the width that an option should take in the syntax,
- * including the two-space leader
- */
- protected static final String SYNTAX_PAD = " ";
- protected static final String OPTION_DELIM = ";";
- private static final String JAVA_VERSION;
- private static final String ASPECTJ_VERSION;
- static {
- String version = "UNKNOWN";
- try { version = System.getProperty("java.version", "UNKNOWN"); }
- catch (Throwable t) {}
- JAVA_VERSION = version;
-
- version = "UNKNOWN";
- try {
- Class c = Class.forName("org.aspectj.bridge.Version");
- version = (String) c.getField("text").get(null);
- } catch (Throwable t) {
- // ignore
- }
- ASPECTJ_VERSION = version;
- }
-
- /** factory for the subclass currently anointed as default */
- public static Harness makeHarness() {
- return new FeatureHarness();
- }
-
- /** @param args String[] like runMain(String[]) args */
- public static void main(String[] args) throws Exception {
- if (LangUtil.isEmpty(args)) {
- File argFile = new File("HarnessArgs.txt");
- if (argFile.canRead()) {
- args = readArgs(argFile);
- } else {
- args = new String[] { "-help" };
- }
- }
- makeHarness().runMain(args, null);
- }
-
- /**
- * Get known option aliases.
- * Subclasses may add new aliases, where the key is the alias option,
- * and the value is a semicolon-delimited (OPTION_DELIM) String of target options.
- * @return Properties with feature aliases or null
- */
- protected static Properties getOptionAliases() {
- if (null == optionAliases) {
- optionAliases = new Properties();
- // XXX load from **OptionAliases.properties
- }
- return optionAliases;
- }
-
- /**
- * Read argFile contents into String[],
- * delimiting at any whitespace
- */
- private static String[] readArgs(File argFile) {
- ArrayList args = new ArrayList();
-// int lineNum = 0;
-
- try {
- BufferedReader stream =
- new BufferedReader(new FileReader(argFile));
- String line;
- while (null != (line = stream.readLine())) {
- StringTokenizer st = new StringTokenizer(line);
- while (st.hasMoreTokens()) {
- args.add(st.nextToken());
- }
- }
- } catch (IOException e) {
- e.printStackTrace(System.err);
- }
- return (String[]) args.toArray(new String[0]);
- }
-
- /** aliases key="option" value="option{,option}" */
- private static Properties optionAliases;
-
- /** be extra noisy if true */
- private boolean verboseHarness;
-
- /** be extra quiet if true */
- private boolean quietHarness;
-
- /** just don't say anything! */
- protected boolean silentHarness;
-
- /** map of feature names to features */
- private HashMap features;
-
- /** if true, do not delete temporary files. */
- private boolean keepTemp;
-
- /** if true, delete temporary files as each test completes. */
- private boolean killTemp;
-
- /** if true, then log results in report(..) when done */
- private boolean logResults;
-
- /** if true and there were failures, do System.exit({numFailures})*/
- private boolean exitOnFailure;
-
- protected Harness() {
- features = new HashMap();
- }
-
-
- /**
- * Entry point for a test.
- * This reads in the arguments,
- * creates the test suite(s) from the input file(s),
- * and for each suite does setup, run, report, and cleanup.
- * When arguments are read, any option ending with "-" causes
- * option variants, a set of args with and another without the
- * option. See {@link #optionVariants(String[])} for
- * more details.
- * @param args the String[] for the test suite - use -help to get options,
- * and use "-" suffixes for variants.
- * @param resultList List for IRunStatus results - ignored if null
- */
- public void runMain(String[] args, List resultList) {
- LangUtil.throwIaxIfFalse(!LangUtil.isEmpty(args), "empty args");
- // read arguments
- final ArrayList globals = new ArrayList();
- final ArrayList files = new ArrayList();
- final LinkedList argList = new LinkedList();
- argList.addAll(Arrays.asList(args));
- for (int i = 0; i < argList.size(); i++) {
- String arg = (String) argList.get(i);
- List aliases = aliasOptions(arg);
- if (!LangUtil.isEmpty(aliases)) {
- argList.remove(i);
- argList.addAll(i, aliases);
- i--;
- continue;
- }
- if ("-help".equals(arg)) {
- logln("java " + Harness.class.getName() + " {option|suiteFile}..");
- printSyntax(getLogStream());
- return;
- } else if (isSuiteFile(arg)) {
- files.add(arg);
- } else if (!acceptOption(arg)) {
- globals.add(arg);
- } // else our options absorbed
- }
- if (0 == files.size()) {
- logln("## Error reading arguments: at least 1 suite file required");
- logln("java " + Harness.class.getName() + " {option|suiteFile}..");
- printSyntax(getLogStream());
- return;
- }
- String[] globalOptions = (String[]) globals.toArray(new String[0]);
- String[][] globalOptionVariants = optionVariants(globalOptions);
- AbstractRunSpec.RT runtime = new AbstractRunSpec.RT();
- if (verboseHarness) {
- runtime.setVerbose(true);
- }
-
- // run suites read from each file
- AjcTest.Suite.Spec spec;
- for (Iterator iter = files.iterator(); iter.hasNext();) {
- File suiteFile = new File((String) iter.next());
- if (!suiteFile.canRead()) {
- logln("runMain(..) cannot read file: " + suiteFile);
- continue;
- }
- if (null == (spec = readSuite(suiteFile))) {
- logln("runMain(..) cannot read suite from file: " + suiteFile);
- continue;
- }
-
- MessageHandler holder = new MessageHandler();
- for (int i = 0; i < globalOptionVariants.length; i++) {
- runtime.setOptions(globalOptionVariants[i]);
- holder.init();
- boolean skip = !spec.adoptParentValues(runtime, holder);
- // awful/brittle assumption about number of skips == number of skip messages
- final List skipList = MessageUtil.getMessages(holder, IMessage.INFO, false, "skip");
- if ((verboseHarness || skip || (0 < skipList.size()))) {
- final List curArgs = Arrays.asList(globalOptionVariants[i]);
- logln("runMain(" + suiteFile + ", " + curArgs + ")");
- if (verboseHarness) {
- String format = "yyyy.MM.dd G 'at' hh:mm:ss a zzz";
- SimpleDateFormat formatter = new SimpleDateFormat (format);
- String date = formatter.format(new Date());
- logln("test date: " + date);
- logln("harness features: " + listFeatureNames());
- logln("Java version: " + JAVA_VERSION);
- logln("AspectJ version: " + ASPECTJ_VERSION);
- }
- if (!(quietHarness || silentHarness) && holder.hasAnyMessage(null, true)) {
- MessageUtil.print(getLogStream(), holder, "skip - ");
- MessageUtil.printMessageCounts(getLogStream(), holder, "skip - ");
- }
- }
- if (!skip) {
- doStartSuite(suiteFile);
- long elapsed = 0;
- RunResult result = null;
- try {
- final long startTime = System.currentTimeMillis();
- result = run(spec);
- if (null != resultList) {
- resultList.add(result);
- }
- elapsed = System.currentTimeMillis() - startTime;
- report(result.status, skipList.size(), result.numIncomplete, elapsed);
- } finally {
- doEndSuite(suiteFile,elapsed);
- }
- if (exitOnFailure) {
- int numFailures = RunUtils.numFailures(result.status, true);
- if (0 < numFailures) {
- System.exit(numFailures);
- }
- Object value = result.status.getResult();
- if ((value instanceof Boolean)
- && !((Boolean) value).booleanValue()) {
- System.exit(-1);
- }
- }
- }
- }
- }
- }
-
-
- /**
- * Tell all IRunListeners that a test suite has just completed
- * @param suiteFile
- * @param elapsed
- */
- private void doEndSuite(File suiteFile, long elapsed) {
- Collection c = features.values();
- for (Iterator iter = c.iterator(); iter.hasNext();) {
- Feature element = (Feature) iter.next();
- if (element.listener instanceof TestCompleteListener) {
- ((TestCompleteListener)element.listener).doEndSuite(suiteFile,elapsed);
- }
- }
- }
- /**
- * Generate variants of String[] options by creating an extra set for
- * each option that ends with "-". If none end with "-", then an
- * array equal to <code>new String[][] { options }</code> is returned;
- * if one ends with "-", then two sets are returned,
- * three causes eight sets, etc.
- * @return String[][] with each option set.
- * @throws IllegalArgumentException if any option is null or empty.
- */
- public static String[][] optionVariants(String[] options) {
- if ((null == options) || (0 == options.length)) {
- return new String[][] { new String[0]};
- }
- // be nice, don't stomp input
- String[] temp = new String[options.length];
- System.arraycopy(options, 0, temp, 0, temp.length);
- options = temp;
- boolean[] dup = new boolean[options.length];
- int numDups = 0;
-
- for (int i = 0; i < options.length; i++) {
- String option = options[i];
- if (LangUtil.isEmpty(option)) {
- throw new IllegalArgumentException("empty option at " + i);
- }
- if (option.endsWith("-")) {
- options[i] = option.substring(0, option.length()-1);
- dup[i] = true;
- numDups++;
- }
- }
- final String[] NONE = new String[0];
- final int variants = exp(2, numDups);
- final String[][] result = new String[variants][];
- // variant is a bitmap wrt doing extra value when dup[k]=true
- for (int variant = 0; variant < variants; variant++) {
- ArrayList next = new ArrayList();
- int nextOption = 0;
- for (int k = 0; k < options.length; k++) {
- if (!dup[k] || (0 != (variant & (1 << (nextOption++))))) {
- next.add(options[k]);
- }
- }
- result[variant] = (String[]) next.toArray(NONE);
- }
- return result;
- }
-
- private static int exp(int base, int power) { // not in Math?
- if (0 > power) {
- throw new IllegalArgumentException("negative power: " + power);
- }
- int result = 1;
- while (0 < power--) {
- result *= base;
- }
- return result;
- }
-
- /**
- * Tell all IRunListeners that we are about to start a test suite.
- * @param suiteFile the suite file about to be run
- */
- private void doStartSuite(File suiteFile) {
- Collection c = features.values();
- for (Iterator iter = c.iterator(); iter.hasNext();) {
- Feature element = (Feature) iter.next();
- if (element.listener instanceof TestCompleteListener) {
- ((TestCompleteListener)element.listener).doStartSuite(suiteFile);
- }
- }
- }
-
- /** Run the test suite specified by the spec */
- protected RunResult run(AjcTest.Suite.Spec spec) {
- LangUtil.throwIaxIfNull(spec, "spec");
- /*
- * For each run, initialize the runner and validator,
- * create a new set of IRun{Iterator} tests,
- * and run them.
- * Delete all temp files when done.
- */
- Runner runner = new Runner();
- if (0 != features.size()) {
- for (Iterator iter = features.entrySet().iterator(); iter.hasNext();) {
- Feature feature = (Feature) ((Map.Entry) iter.next()).getValue();
- runner.registerListener(feature.clazz, feature.listener);
- }
- }
- IMessageHolder holder = new MessageHandler();
- int numIncomplete = 0;
- RunStatus status = new RunStatus(holder, runner);
- status.setIdentifier(spec);
- // validator is used for all setup in entire tree...
- Validator validator = new Validator(status);
- if (!killTemp) {
- validator.lock(this);
- }
- Sandbox sandbox = null;
- try {
- sandbox = new Sandbox(spec.getSuiteDirFile(), validator);
- IRunIterator tests = spec.makeRunIterator(sandbox, validator);
- runner.runIterator(tests, status, null);
- if (tests instanceof RunSpecIterator) {
- numIncomplete = ((RunSpecIterator) tests).getNumIncomplete();
- }
- } finally {
- if (!keepTemp) {
- if (!killTemp) {
- validator.unlock(this);
- }
- validator.deleteTempFiles(verboseHarness);
- }
- }
- return new RunResult(status, numIncomplete);
- }
-
- /**
- * Report the results of a test run after it is completed.
- * Clients should be able to identify the number of:
- * <ul>
- * <li>tests run and passed</li>
- * <li>tests failed, i.e., run and not passed (fail, error, etc.)</li>
- * <li>tests incomplete, i.e., test definition read but test run setup failed</li>
- * <li>tests skipped, i.e., test definition read and found incompatible with
- * the current configuration.</li>
- * </ul>
- *
- * @param status returned from the run
- * @param numSkipped int tests that were skipped because of
- * configuration incompatibilities
- * @param numIncomplete int tests that failed during setup,
- * usually indicating a test definition or configuration error.
- * @param msElapsed elapsed time in milliseconds
- * */
- protected void report(IRunStatus status, int numSkipped, int numIncomplete,
- long msElapsed ) {
- if (logResults) {
- RunUtils.AJCSUITE_PRINTER.printRunStatus(getLogStream(), status);
- } else if (!(quietHarness || silentHarness) && (0 < status.numMessages(null, true))) {
- if (!silentHarness) {
- MessageUtil.print(getLogStream(), status, "");
- }
- }
-
- logln(BridgeUtil.childString(status, numSkipped, numIncomplete)
- + " " + (msElapsed/1000) + " seconds");
-
- }
-
- // --------------- delegate methods
- protected void logln(String s) {
- if (!silentHarness) {
- getLogStream().println(s);
- }
- }
-
- protected PrintStream getLogStream() {
- return System.out;
- }
-
- protected boolean isSuiteFile(String arg) {
- return ((null != arg)
- && (arg.endsWith(".txt") || arg.endsWith(".xml"))
- && new File(arg).canRead());
- }
-
- /**
- * Get the options that the input option is an alias for.
- * Subclasses may add options directly to the getFeatureAliases result
- * or override this.
- * @return null if the input is not an alias for other options,
- * or a non-empty List (String) of options that this option is an alias for
- */
- protected List aliasOptions(String option) {
- Properties aliases = Harness.getOptionAliases();
- if (null != aliases) {
- String args = aliases.getProperty(option);
- if (!LangUtil.isEmpty(args)) {
- return LangUtil.anySplit(args, OPTION_DELIM);
- }
- }
- return null;
- }
-
- /**
- * Read and implement any of our options.
- * Options other than this and suite files will be
- * passed down as parent options through the test spec hierarchy.
- * Subclasses override this to implement new options.
- */
- protected boolean acceptOption(String option) {
-// boolean result = false;
- if (LangUtil.isEmpty(option)) {
- return true; // skip bad input
- } else if ("-verboseHarness".equals(option)) {
- verboseHarness = true;
- } else if ("-quietHarness".equals(option)) {
- quietHarness = true;
- } else if ("-silentHarness".equals(option)) {
- silentHarness = true;
- } else if ("-keepTemp".equals(option)) {
- keepTemp = true;
- } else if ("-killTemp".equals(option)) {
- killTemp = true;
- } else if ("-logResults".equals(option)) {
- logResults = true;
- } else if ("-exitOnFailure".equals(option)) {
- exitOnFailure = true;
- } else {
- return false;
- }
- return true;
- }
-
- /**
- * Read a test suite file.
- * This implementation knows how to read .txt and .xml files
- * and logs any errors.
- * Subclasses override this to read new kinds of suites.
- * @return null if unable to read (logging errors) or AjcTest.Suite.Spec otherwise
- */
- protected AjcTest.Suite.Spec readSuite(File suiteFile) {
- if (null != suiteFile) {
- String path = suiteFile.getPath();
- try {
- if (path.endsWith(".xml")) {
- return AjcSpecXmlReader.getReader().readAjcSuite(suiteFile);
- } else if (path.endsWith(".txt")) {
- return FlatSuiteReader.ME.readSuite(suiteFile);
- } else {
- logln("unrecognized extension? " + path);
- }
- } catch (IOException e) {
- e.printStackTrace(getLogStream());
- }
- }
- return null;
- }
-
- /** Add feature to take effect during the next runMain(..) invocation.
- * @param feature the Feature to add, using feature.name as key.
- */
- protected void addFeature(Feature feature) {
- if (null != feature) {
- features.put(feature.name, feature);
- }
- }
-
- /** remove feature by name (same as feature.name) */
- protected void removeFeature(String name) {
- if (!LangUtil.isEmpty(name)) {
- features.remove(name);
- }
- }
-
- /** @return unmodifiable Set of feature names */
- protected Set listFeatureNames() {
- return Collections.unmodifiableSet(features.keySet());
- }
-
- /** print detail message for syntax of main(String[]) command-line */
- protected void printSyntax(PrintStream out) {
- out.println(" {??} unrecognized options are used as test spec globals");
- out.println(" -help print this help message");
- out.println(" -verboseHarness harness components log verbosely");
- out.println(" -quietHarness harness components suppress logging");
- out.println(" -keepTemp do not delete temporary files");
- out.println(" -logResults log results at end, verbosely if fail");
- out.println(" -exitOnFailure do System.exit({num-failures}) if suite fails");
- out.println(" {suiteFile}.xml.. specify test suite XML file");
- out.println(" {suiteFile}.txt.. specify test suite .txt file (deprecated)");
- }
-
- /** print known aliases at the end of the syntax message */
- protected void printAliases(PrintStream out) {
- LangUtil.throwIaxIfNull(out, "out");
- Properties props = getOptionAliases();
- if (null == props) {
- return;
- }
- int pdLength = SYNTAX_PAD.length();
- Set entries = props.entrySet();
- for (Iterator iter = entries.iterator(); iter.hasNext();) {
- Map.Entry entry = (Map.Entry) iter.next();
- String alias = " " + (String) entry.getKey();
- int buf = pdLength - alias.length();
- if (0 < buf) {
- alias += SYNTAX_PAD.substring(0, buf);
- } else {
- alias += " ";
- }
- out.println(alias + entry.getValue());
- }
- }
-
- /** result struct for run(AjcTest.Spec) */
- public static class RunResult {
- public final IRunStatus status;
- public final int numIncomplete;
- public RunResult(IRunStatus status, int numIncomplete) {
- this.status = status;
- this.numIncomplete = numIncomplete;
- }
- }
- /** feature implemented as named IRunIterator/IRun association */
- public static class Feature {
- /** never null, always assignable to IRun */
- public final Class clazz;
-
- /** never null */
- public final IRunListener listener;
-
- /** never null or empty */
- public final String name;
-
- /** @throws IllegalArgumentException if any is null/empty or clazz is
- * not assignable to IRun
- */
- public Feature(String name, Class clazz, IRunListener listener) {
- LangUtil.throwIaxIfNull(clazz, "class");
- if (!IRun.class.isAssignableFrom(clazz)
- && !IRunIterator.class.isAssignableFrom(clazz)) {
- String s = clazz.getName() + " is not assignable to IRun or IRunIterator";
- LangUtil.throwIaxIfFalse(false, s);
- }
- LangUtil.throwIaxIfNull(listener, "listener");
- LangUtil.throwIaxIfNull(name, "name");
- LangUtil.throwIaxIfFalse(0 < name.length(), "empty name");
- this.clazz = clazz;
- this.listener = listener;
- this.name = name;
- }
-
- /** @return feature name */
- public String toString() {
- return name;
- }
- }
-}
-
-
-/**
- * Harness with features for controlling output
- * (logging results and hiding streams).
- * Use -help to get a list of feature options.
- */
-class FeatureHarness extends Harness {
-
- private static final String[] ALIASES = new String[]
- { "-hideStreams",
- "-hideCompilerStreams"
- + OPTION_DELIM + "-hideRunStreams",
- "-jim",
- "-logMinFail"
- + OPTION_DELIM + "-hideStreams",
- "-loud",
- "-verboseHarness",
- "-baseline",
- "-verboseHarness"
- + OPTION_DELIM + "-traceTestsMin"
- + OPTION_DELIM + "-hideStreams",
- "-release",
- "-baseline"
- + OPTION_DELIM + "-ajctestSkipKeywords=knownLimitation,purejava",
- "-junit",
- "-silentHarness" + OPTION_DELIM + "-logJUnit" + OPTION_DELIM +
- "-hideStreams",
- "-cruisecontrol",
- "-junit" + OPTION_DELIM + "-ajctestSkipKeywords=knownLimitation,purejava"
- };
- static {
- Properties optionAliases = Harness.getOptionAliases();
- if (null != optionAliases) {
- for (int i = 1; i < ALIASES.length; i += 2) {
- optionAliases.put(ALIASES[i-1], ALIASES[i]);
- }
- }
- }
-
- /** controller for suppressing and sniffing error and output streams. */
- StreamsHandler streamsHandler;
-
- /** facility of hiding-streams may be applied in many features */
- IRunListener streamHider;
-
- /** facility of capture/log may be applied in many features */
- IRunListener captureLogger;
-
- /** when making tests, do not run them */
- TestMaker testMaker;
-
- public FeatureHarness() {
- super();
- streamsHandler = new StreamsHandler(false, true);
- }
- /** override to make tests or run as usual */
- protected RunResult run(AjcTest.Suite.Spec spec) {
- if (null != testMaker) {
- System.out.println("generating rather than running tests...");
- return testMaker.run(spec);
- } else {
- return super.run(spec);
- }
- }
-
- /**
- * Log via StreamsHandler-designated log stream.
- * @see org.aspectj.testing.drivers.Harness#logln(String)
- */
- protected void logln(String s) {
- if (!silentHarness)
- streamsHandler.lnlog(s);
- }
-
- /**
- * @see org.aspectj.testing.drivers.Harness#getLogStream()
- * @return StreamsHandler-designated log stream.
- */
- protected PrintStream getLogStream() {
- return streamsHandler.out;
- }
-
-
- /** print detail message for syntax of main(String[]) command-line */
- protected void printSyntax(PrintStream out) {
- super.printSyntax(out);
- out.println(" -progressDots log . or ! for each AjcTest pass or fail");
- out.println(" -logFail log each failed AjcTest");
- out.println(" -logPass log each passed AjcTest");
- out.println(" -logAll log each AjcTest");
- out.println(" -logMinFail log each AjcTest failure with minimal excess data");
- out.println(" -logMinPass log each AjcTest success with minimal excess data");
- out.println(" -logMinAll log all AjcTest with minimal excess data");
- out.println(" -logXMLFail log XML definition for each failed AjcTest");
- out.println(" -logXMLPass log XML definition for each passed AjcTest");
- out.println(" -logXMLAll log XML definition for each AjcTest");
- out.println(" -logJUnit log all tests in JUnit XML report style");
- out.println(" -hideRunStreams hide err/out streams during java runs");
- out.println(" -hideCompilerStreams hide err/out streams during compiler runs");
- out.println(" -traceTests log pass|fail, /time/memory taken after each test");
- out.println(" -traceTestsMin log pass|fail after each test");
- out.println(" -XmakeTests create source files/dirs for initial compile run of each test");
- out.println(" -XlogPublicType log test XML if \"public type\" in an error message");
- out.println(" -XlogSourceIn=Y,Z log test XML if Y or Z is in path of any sources");
- super.printAliases(out);
- }
-
- /** Accept a number of logging and output options */
- protected boolean acceptOption(String option) {
- if (null == option) {
- return false;
- }
-
- final StreamsHandler streams = streamsHandler;
- final IRunValidator validator = RunValidator.NORMAL;
- final RunUtils.IRunStatusPrinter verbose
- = RunUtils.VERBOSE_PRINTER;
- final RunUtils.IRunStatusPrinter terse
- = RunUtils.TERSE_PRINTER;
-// final boolean LOGPASS = true;
-// final boolean LOGFAIL = true;
-// final boolean SKIPPASS = false;
-// final boolean SKIPFAIL = false;
-// final boolean LOGSTREAMS = true;
- final boolean SKIPSTREAMS = false;
-
- Feature feature = null;
- if (super.acceptOption(option)) {
- // ok, result returned below
-
- } else if ("-XmakeTests".equals(option)) {
- testMaker = TestMaker.ME;
- } else if (option.startsWith("-traceTestsMin")) {
- feature = new Feature(option, AjcTest.class,new TestTraceLogger(streams, false));
- } else if (option.startsWith("-traceTests")) {
- feature = new Feature(option, AjcTest.class,new TestTraceLogger(streams, true));
- } else if (option.startsWith("-logMin")) {
- feature = new Feature(option, AjcTest.class,
- new RunLogger(option, SKIPSTREAMS, streams, validator, terse));
- } else if (option.startsWith("-logXML")) {
- feature = new Feature(option, AjcTest.class,
- new XmlLogger(option, streams, validator));
- } else if (option.startsWith("-logJUnit")) {
- feature = new Feature(option, AjcTest.class,
- new JUnitXMLLogger(option,streams,validator));
- } else if (option.startsWith("-log")) {
- feature = new Feature(option, AjcTest.class,
- new RunLogger(option, SKIPSTREAMS, streams, validator, verbose));
- } else if ("-hideRunStreams".equals(option)) {
- feature = new Feature(option, JavaRun.class, getStreamHider());
- } else if ("-hideCompilerStreams".equals(option)) {
- addFeature(new Feature(option, IncCompilerRun.class, getStreamHider())); // hmmm
- feature = new Feature(option, CompilerRun.class, getStreamHider());
- } else if ("-progressDots".equals(option)) {
- IRunListener listener = new RunListener() {
- public void runCompleted(IRunStatus run) {
- streamsHandler.log((validator.runPassed(run) ? "." : "!"));
- }
- };
- feature = new Feature(option, AjcTest.class, listener);
- } else if (option.startsWith("-XlogPublicType")) {
- String label = option + TestCompleteListener.PASS; // print when validator true
- feature = new Feature(option, AjcTest.class,
- new XmlLogger(label, streams, MessageRunValidator.PUBLIC_TYPE_ERROR));
- } else if (option.startsWith("-XlogSourceIn")) {
- String input = option.substring("-XlogSourceIn=".length());
- LangUtil.throwIaxIfFalse(0 < input.length(), option);
- String label = "-XlogSourceIn=" + TestCompleteListener.PASS; // print when validator true
- StringRunner sr = new SubstringRunner(input, false);
- feature = new Feature(option, AjcTest.class,
- new XmlLogger(label, streams, new SourcePathValidator(sr)));
- } else {
- return false;
- }
- addFeature(feature);
- return true;
- }
-
- /** lazy construction for shared hider */
- protected IRunListener getStreamHider() {
- if (null == streamHider) {
- streamHider = new RunListener() {
- public void runStarting(IRunStatus run) {
- streamsHandler.hide();
- }
- public void runCompleted(IRunStatus run) {
- streamsHandler.show();
- }
- public String toString() { return "Harness StreamHider"; }
- };
- }
- return streamHider;
- }
-}
-
-/** Generate any needed test case files for any test. */
-class TestMaker {
-
- static TestMaker ME = new TestMaker();
-
- /** @throws Error if unable to make dir */
- static void mkdirs(File dir) {
- if (null != dir && !dir.exists()) {
- if (!dir.mkdirs()) {
- throw new Error("unable to make dir: " + dir);
- }
- }
- }
- static String getFileContents(File baseDir, File file, String label) {
- String fileName = file.getName();
- if (fileName.endsWith(".java")) {
- fileName = fileName.substring(0, fileName.length() - 5);
- }
- StringBuffer sb = new StringBuffer();
- String filePath = file.getParentFile().getAbsolutePath();
- String dirPath = baseDir.getAbsolutePath();
- String pack = null;
- if (filePath.startsWith(dirPath)) {
- pack = filePath.substring(dirPath.length()).replace('/', '.');
- }
- if (!LangUtil.isEmpty(pack)) {
- sb.append("package " + pack + ";");
- }
- final String EOL = "\n"; // XXX find discovered EOL
- sb.append( EOL
- + EOL + "import org.aspectj.testing.Tester;"
- + EOL + ""
- + EOL + "/** @testcase " + label + " */"
- + EOL + "public class " + fileName + " {"
- + EOL + "\tpublic static void main(String[] args) { "
- + EOL + "\t\tTester.check(null != args, \"null args\"); "
- + EOL + "\t}"
- + EOL + "}"
- + EOL
- );
-
- return sb.toString();
- }
-
- /** create a minimal source file for a test */
- static void createSrcFile(File baseDir, File file, String testName) {
- if (file.exists()) {
- return;
- }
- String contents = getFileContents(baseDir, file, testName);
- String error = FileUtil.writeAsString(file, contents);
- if (null != error) {
- throw new Error(error);
- }
- }
-
- /** create an empty arg file for a test */
- static void createArgFile(File baseDir, File file, String testName) {
- if (file.exists()) {
- return;
- }
- String contents = "// argfile " + file;
- String error = FileUtil.writeAsString(file, contents);
- if (null != error) {
- throw new Error(error);
- }
- }
-
- public Harness.RunResult run(AjcTest.Suite.Spec spec) {
- ArrayList kids = spec.getChildren();
- for (Iterator iter = kids.iterator(); iter.hasNext();) {
- makeTest( (AjcTest.Spec) iter.next());
- }
- IRunStatus status = new RunStatus(new MessageHandler(), new Runner());
- status.start();
- status.finish(IRunStatus.PASS);
- return new Harness.RunResult(status, 0);
- }
-
- private void makeTest(AjcTest.Spec spec) {
- if (null == spec) {
- throw new Error("null spec");
- }
- CompilerRun.Spec compileSpec = AjcTest.unwrapCompilerRunSpec(spec);
- if (null == compileSpec) {
- throw new Error("null compileSpec");
- }
- System.out.println(" generating test files for test: " + spec.getDescription());
- File dir = spec.getSuiteDir();
- if (null != dir) {
- TestMaker.mkdirs(dir);
- }
- String offset = spec.getTestDirOffset();
- if (!LangUtil.isEmpty(offset)) {
- if (null == dir) {
- dir = new File(offset);
- } else {
- dir = new File(dir.getAbsolutePath() + "/" + offset);
- }
- } else if (null == dir) {
- dir = new File(".");
- }
- StringBuffer testName = new StringBuffer();
- int pr = spec.getBugId();
- if (0 < pr) {
- testName.append("PR#" + pr + " ");
- }
-
- testName.append(spec.getDescription());
- final String label = testName.toString();
- final File[] srcFiles = FileUtil.getBaseDirFiles(dir, compileSpec.getPathsArray());
- if (!LangUtil.isEmpty(srcFiles)) {
- for (int i = 0; i < srcFiles.length; i++) {
- TestMaker.createSrcFile(dir, srcFiles[i], label);
- }
- }
- final File[] argFiles = FileUtil.getBaseDirFiles(dir, compileSpec.getArgfilesArray());
- if (!LangUtil.isEmpty(argFiles)) {
- for (int i = 0; i < argFiles.length; i++) {
- TestMaker.createArgFile(dir, argFiles[i], label);
- }
- }
-
- }
-
- /** @return "Testmaker()" */
- public String toString() {
- return "TestMaker()";
- }
-}
-
-interface StringRunner {
- boolean accept(String s);
-}
-
-/**
- * StringRunner that accepts input matching 0+ substrings,
- * optionally case-insensitive.
- */
-class SubstringRunner implements StringRunner {
- private static String[] extractSubstrings(
- String substrings,
- boolean caseSensitive) {
- if (null == substrings) {
- return null;
- }
- StringTokenizer st = new StringTokenizer(substrings, ",");
- String[] result = new String[st.countTokens()];
- for (int i = 0; i < result.length; i++) {
- result[i] = st.nextToken().trim();
- LangUtil.throwIaxIfFalse(0 < result[i].length(), "empty entry");
- if (!caseSensitive) {
- result[i] = result[i].toLowerCase();
- }
- }
- return result;
- }
-
- private final String[] substrings;
- private final boolean caseSensitive;
-
- /**
- * @param substrings the String containing comma-separated substrings
- * to find in input - if null, any input accepted
- * @param caseSensitive if true, do case-sensitive comparison
- * @throws IllegalArgumentException if any substrings contains empty entry ", ,"
- */
- SubstringRunner(String substrings, boolean caseSensitive) {
- this.caseSensitive = caseSensitive;
- this.substrings = extractSubstrings(substrings, caseSensitive);
- }
-
- public boolean accept(String input) {
- if (null == substrings) {
- return true;
- }
- if (null == input) {
- return false;
- }
-
- if (!caseSensitive) {
- input = input.toLowerCase();
- }
- for (int i = 0; i < substrings.length; i++) {
- if (-1 != input.indexOf(substrings[i])) {
- return true;
- }
- }
- return false;
- }
-}
-
-/**
- * Signal whether run "passed" based on validating absolute source paths.
- * (Static evaluation - no run necessary)
- */
-class SourcePathValidator implements IRunValidator { // static - no run needed
- private final StringRunner validator;
- // XXX hoist common
- SourcePathValidator(StringRunner validator) {
- LangUtil.throwIaxIfNull(validator, "validator");
- this.validator = validator;
- }
- /**
- * @return true if any source files in compile spec are
- * accepted by the validator.
- * @see org.aspectj.testing.run.IRunValidator#runPassed(IRunStatus)
- */
- public boolean runPassed(IRunStatus run) {
- AjcTest.Spec testSpec = AjcTest.unwrapSpec(run);
- if (null != testSpec) {
- CompilerRun.Spec compileSpec = AjcTest.unwrapCompilerRunSpec(testSpec);
- File basedir = new File(testSpec.getSuiteDir(), testSpec.getTestDirOffset());
- String[] paths = compileSpec.getPathsArray();
- File[] files = FileUtil.getBaseDirFiles(basedir, paths);
- for (int i = 0; i < files.length; i++) {
- if (validator.accept(files[i].getAbsolutePath())) {
- return true;
- }
- }
- }
- return false;
- }
-
-}
-
-/** Signal whether run "passed" based on message kind and content */
-class MessageRunValidator implements IRunValidator {
-
- /** signals "passed" if any error contains "public type" */
- static final IRunValidator PUBLIC_TYPE_ERROR
- = new MessageRunValidator("public type", IMessage.ERROR, false);
-
- private final IMessage.Kind kind;
- private final String sought;
- private final boolean orGreater;
-
- /**
- * @param sought the String to seek anywhere in any message of the right kind;
- * if null, accept any message of the right kind.
- * @param kind the IMessage.Kind of messages to search - all if null
- */
- MessageRunValidator(String sought, IMessage.Kind kind, boolean orGreater) {
- this.sought = sought;
- this.kind = kind;
- this.orGreater = orGreater;
- }
-
- /** @return true if this run has messages of the right kind and text */
- public boolean runPassed(IRunStatus run) {
- return gotMessage(new IRunStatus[] {run});
- }
-
- /**
- * Search these children and their children recursively
- * for messages of the right kind and content.
- * @return true at first match of message of the right kind and content
- */
- private boolean gotMessage(IRunStatus[] children) {
- if (LangUtil.isEmpty(children)) {
- return false;
- }
- for (int i = 0; i < children.length; i++) {
- IRunStatus run = children[i];
- if (null == run) {
- continue; // hmm
- }
- IMessage[] messages = run.getMessages(kind, orGreater);
- if (!LangUtil.isEmpty(messages)) {
- if (LangUtil.isEmpty(sought)) {
- return true;
- } else {
- for (int j = 0; j < messages.length; j++) {
- if (null == messages[j]) {
- continue; // hmm
- }
- String text = messages[j].getMessage();
- if ((null != text) && (-1 != text.indexOf(sought))) {
- return true;
- }
- }
- }
- }
- if (gotMessage(run.getChildren())) {
- return true;
- }
- }
- return false;
- }
-}
-
-/**
- * Base class for listeners that run depending on pass/fail status of input.
- * Template method runCompleted handles whether to run.
- * Subclasses implement doRunCompleted(..).
- */
-abstract class TestCompleteListener extends RunListener {
- /** label suffix indicating both pass and fail */
- public static final String ALL = "All";
-
- /** label suffix indicating fail */
- public static final String FAIL = "Fail";
-
- /** label suffix indicating pass */
- public static final String PASS = "Pass";
-
-
- /** runValidator determines if a given run passed */
- protected final IRunValidator runValidator;
-
- /** label for this listener */
- final String label;
-
- /** if true and run passed, then run doRunCompleted(..) */
- final boolean logOnPass;
-
- /** if true and run did not pass, then run doRunCompleted(..) */
- final boolean logOnNotPass;
-
- /** may be null */
- protected final StreamsHandler streamsHandler;
-
- /** true if the last run evaluation was ok */
- boolean lastRunOk;
-
- /** last run evaluated */
- IRunStatus lastRun; // XXX small memory leak - cache hashcode instead?
-
- /** @param label endsWith PASS || FAIL || ALL */
- protected TestCompleteListener(
- String label,
- IRunValidator runValidator,
- StreamsHandler streamsHandler) {
- if (null == runValidator) {
- runValidator = RunValidator.NORMAL;
- }
- this.label = (null == label? "" : label);
- this.logOnPass = this.label.endsWith(PASS) || this.label.endsWith(ALL);
- this.logOnNotPass = this.label.endsWith(FAIL) || this.label.endsWith(ALL);
- this.runValidator = runValidator;
- this.streamsHandler = streamsHandler;
- }
-
- public void runStarted(IRunStatus run) {
- if (null != streamsHandler) {
- streamsHandler.startListening();
- }
- }
-
- /** subclasses implement this to do some per-test initialization */
- protected void doRunStarted(IRunStatus run) {
- }
-
-
- /** subclasses implement this to do some per-suite initialization */
- protected void doStartSuite(File suite) {
- }
-
- /** subclasses implement this to do end-of-suite processing */
- protected void doEndSuite(File suite, long duration) {
- }
-
- public final void runCompleted(IRunStatus run) {
- boolean doit = lastRunOk(run);
- StreamsHandler.Result result = null;
- if (null != streamsHandler) {
- streamsHandler.endListening(doit);
- }
- if (doit) {
- doRunCompleted(run, result);
- }
- }
-
- /**
- * @return true if run is ok per constructor specifications
- */
- protected boolean lastRunOk(IRunStatus run) {
- if (lastRun != run) {
- boolean passed = runValidator.runPassed(run);
- lastRunOk = (passed ? logOnPass : logOnNotPass);
- }
- return lastRunOk;
- }
-
- /** @return "{classname}({pass}{,fail})" indicating when this runs */
- public String toString() { // XXX add label?
- return LangUtil.unqualifiedClassName(this)
- + "(" + (logOnPass ? (logOnNotPass ? "pass, fail)" : "pass)")
- : (logOnNotPass ? "fail)" : ")"));
- }
- /**
- * Subclasses implement this to do some completion action
- * @param run the IRunStatus for this completed run
- * @param result the StreamsHandler.Result (if any - may be null)
- */
- public abstract void doRunCompleted(IRunStatus run, StreamsHandler.Result result);
-}
-
-/**
- * Write XML for any test passed and/or failed.
- * Must register with Runner for RunSpecIterator.class,
- * most sensibly AjcTest.class.
- */
-class XmlLogger extends TestCompleteListener {
- /**
- * @param printer the component that prints any status - not null
- * @param runValidator if null, use RunValidator.NORMAL
- */
- public XmlLogger(
- String label,
- StreamsHandler streamsHandler,
- IRunValidator runValidator) {
- super(label, runValidator, streamsHandler);
- }
-
- public void doRunCompleted(IRunStatus run, StreamsHandler.Result result) {
- PrintStream out = streamsHandler.getLogStream();
- out.println("");
- XMLWriter writer = new XMLWriter(new PrintWriter(out, true));
- Object id = run.getIdentifier();
- if (!(id instanceof Runner.IteratorWrapper)) {
- out.println(this + " not IteratorWrapper: "
- + id.getClass().getName() + ": " + id);
- return;
- }
- IRunIterator iter = ((Runner.IteratorWrapper) id).iterator;
- if (!(iter instanceof RunSpecIterator)) {
- out.println(this + " not RunSpecIterator: " + iter.getClass().getName()
- + ": " + iter);
- return;
- }
- ((RunSpecIterator) iter).spec.writeXml(writer);
- out.flush();
- }
-
-}
-
-/**
- * Write junit style XML output (for incorporation into html test results and
- * cruise control reports).
- * The format is:
- * <?xml version="1.0" encoding="UTF-8" ?>
- * <testsuite errors="x" failures="x" name="suite-name" tests="xx" time="ss.ssss">
- * <properties/>
- * <testcase name="passingTest" time="s.hh"></testcase>
- * <testcase name="failingTest" time="s.hh">
- * <failure message="failureMessage" type="ExceptionType">free text</failure>
- * </testcase>
- * </testsuite>
- */
-class JUnitXMLLogger extends TestCompleteListener {
-
-// private File suite;
- private StringBuffer junitOutput;
- private long startTimeMillis;
- private int numTests = 0;
- private int numFails = 0;
- private DecimalFormat timeFormatter = new DecimalFormat("#.##");
-
- public JUnitXMLLogger(
- String label,
- StreamsHandler streamsHandler,
- IRunValidator runValidator) {
- super(label + ALL, runValidator, streamsHandler);
- junitOutput = new StringBuffer();
- }
-
- /* (non-Javadoc)
- * @see org.aspectj.testing.drivers.TestCompleteListener#doRunCompleted(org.aspectj.testing.run.IRunStatus, org.aspectj.testing.util.StreamsHandler.Result)
- */
- public void doRunCompleted(IRunStatus run, Result result) {
- long duration = System.currentTimeMillis() - startTimeMillis;
- numTests++;
- junitOutput.append("<testcase name=\"" + run.getIdentifier() + "\" ");
- junitOutput.append("time=\"" + timeFormatter.format((duration)/1000.0f) + "\"");
- junitOutput.append(">");
- if (!run.runResult()) {
- numFails++;
- junitOutput.append("\n");
- junitOutput.append("<failure message=\"test failed\" type=\"unknown\">\n");
-// junitOutput.println(result.junitOutput);
-// junitOutput.println(result.err);
- junitOutput.append("</failure>\n");
- }
- junitOutput.append("</testcase>\n");
- }
-
- /* (non-Javadoc)
- * @see org.aspectj.testing.drivers.TestCompleteListener#runStarted(org.aspectj.testing.run.IRunStatus)
- */
- public void runStarting(IRunStatus run) {
- super.runStarting(run);
- startTimeMillis = System.currentTimeMillis();
- }
-
- /* (non-Javadoc)
- * @see org.aspectj.testing.drivers.TestCompleteListener#doEndSuite(java.io.File, long)
- */
- protected void doEndSuite(File suite, long duration) {
- super.doEndSuite(suite, duration);
- String suiteName = suite.getName();
- // junit reporter doesn't like ".xml" on the end
- suiteName = suiteName.substring(0,suiteName.indexOf('.'));
- PrintStream out = streamsHandler.getLogStream();
- out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
- String timeStr = new DecimalFormat("#.##").format(duration/1000.0);
- out.print("<testsuite errors=\"" + numFails + "\" failures=\"0\" ");
- out.print("name=\"" + suite.getName() + "\" " );
- out.println("tests=\"" + numTests + "\" time=\"" + timeStr + "\">");
- out.print(junitOutput.toString());
- out.println("</testsuite>");
- }
-
- /* (non-Javadoc)
- * @see org.aspectj.testing.drivers.TestCompleteListener#doStartSuite(java.io.File)
- */
- protected void doStartSuite(File suite) {
- super.doStartSuite(suite);
-// this.suite = suite;
- numTests = 0;
- numFails = 0;
- junitOutput = new StringBuffer();
- }
-
-}
-
-/** log pass and/or failed runs */
-class RunLogger extends TestCompleteListener {
- final boolean logStreams;
- final RunUtils.IRunStatusPrinter printer;
-
- /**
- * @param printer the component that prints any status - not null
- * @param runValidator if null, use RunValidator.NORMAL
- */
- public RunLogger(
- String label,
- boolean logStreams,
- StreamsHandler streamsHandler,
- IRunValidator runValidator,
- RunUtils.IRunStatusPrinter printer) {
- super(label, runValidator, streamsHandler);
- LangUtil.throwIaxIfNull(streamsHandler, "streamsHandler");
- LangUtil.throwIaxIfNull(printer, "printer");
- this.logStreams = logStreams;
- this.printer = printer;
- }
-
- public void doRunCompleted(IRunStatus run, StreamsHandler.Result result) {
- PrintStream out = streamsHandler.getLogStream();
- printer.printRunStatus(out, run);
- if (logStreams) {
- if (!LangUtil.isEmpty(result.err)) {
- out.println("--- error");
- out.println(result.err);
- }
- if (!LangUtil.isEmpty(result.out)) {
- out.println("--- ouput");
- out.println(result.out);
- }
- }
- out.println("");
- }
-}
-
-/** trace time and memory between runStarting and runCompleted */
-class TestTraceLogger extends TestCompleteListener {
- private static final Runtime runtime = Runtime.getRuntime();
- private long startTime;
- private long startMemoryFree;
- private final boolean verbose;
-
- public TestTraceLogger(StreamsHandler handler) {
- this(handler, true);
- }
- public TestTraceLogger(StreamsHandler handler, boolean verbose) {
- super("-traceTestsAll", null, handler);
- this.verbose = verbose;
- }
- public void runStarting(IRunStatus run) {
- super.runStarting(run);
- startTime = System.currentTimeMillis();
- startMemoryFree = runtime.freeMemory();
- }
-
- public void doRunCompleted(IRunStatus run, StreamsHandler.Result result) {
- long elapsed = System.currentTimeMillis() - startTime;
- long free = runtime.freeMemory();
- long used = startMemoryFree - free;
- String label = run.runResult() ? "PASS " : "FAIL ";
- PrintStream out = streamsHandler.getLogStream();
- if (verbose) {
- label = label
- + "elapsed: " + LangUtil.toSizedString(elapsed, 7)
- + " free: " + LangUtil.toSizedString(free, 10)
- + " used: " + LangUtil.toSizedString(used, 10)
- + " id: ";
- }
- out.println(label + renderId(run));
- }
-
- /** @return true - always trace tests */
- protected boolean isFailLabel(String label) {
- return true;
- }
-
- /** @return true - always trace tests */
- protected boolean isPassLabel(String label) {
- return true;
- }
-
- /**
- * This implementation returns run identifier toString().
- * Subclasses override this to render id as message suffix.
- */
- protected String renderId(IRunStatus run) {
- return "" + run.getIdentifier();
- }
-}
- // printing files
-// AjcTest.Spec testSpec = AjcTest.unwrapSpec(run);
-// if (null != testSpec) {
-// CompilerRun.Spec compileSpec = AjcTest.unwrapCompilerRunSpec(testSpec);
-// File dir = new File(testSpec.getSuiteDir(), testSpec.getTestDirOffset());
-// List files = compileSpec.getPathsAsFile(dir);
-// StringBuffer sb = new StringBuffer();
-// for (Iterator iter = files.iterator(); iter.hasNext();) {
-// File file = (File) iter.next();
-// sb.append(" " + file.getPath().replace('\\','/').substring(2));
-// }
-// out.println("files: " + sb);
-// }
-
-
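As a worked illustration of the trailing-"-" option expansion described in the optionVariants(String[]) javadoc above, the following sketch (hypothetical, not part of the original sources) shows how one suffixed option yields 2^1 = 2 argument sets:

import java.util.Arrays;

import org.aspectj.testing.drivers.Harness;

public class OptionVariantsDemo {
    public static void main(String[] args) {
        // "-emacssym-" ends with "-", so variants are generated with and without "-emacssym".
        String[][] variants = Harness.optionVariants(
            new String[] { "-emacssym-", "-logMinFail" });
        // Expect two sets: { "-logMinFail" } and { "-emacssym", "-logMinFail" }.
        for (int i = 0; i < variants.length; i++) {
            System.out.println(Arrays.asList(variants[i]));
        }
    }
}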
+++ /dev/null
-<html>
- <head><title>Harness Package Documentation</title></head>
-<body>
-<p>
-The AspectJ compiler test harness can compile and run AspectJ programs
-as specified by the test definitions.
-This document tells you how to run the harness.
-It describes the options you can specify on the command-line to
-control various components that form the harness, either to
-specify options that augment the test definitions or to
-change how the harness works, e.g., selecting particular tests
-or logging more information.
-For information on how to write a test definition, see
-<code>readme-writing-compiler-tests.html</code> in the testing module.
-</p>
-<p>
-The harness drives compiler tests, using
-a chain of responsibility to map elements
-in the schema of a test definition to implementing classes.
-
-</p>
-<table border="1" cellpadding="1">
-<tr><th align="left">Test feature</th>
- <th align="left">Description</th>
- <th align="left">Implementing class</th>
-</tr>
-<tr><td>(loading suites...)</td>
- <td>general harness</td>
- <td>Harness</td>
-</tr>
-<tr><td>(logging...)</td>
- <td>subclass feature harness</td>
- <td>FeatureHarness</td>
-</tr>
-<tr><td><code>&lt;suite&gt;</code></td>
- <td>Test suite</td>
- <td>AjcTest.Suite</td>
-</tr>
-<tr><td> <code>&lt;ajc-test&gt;</code></td>
- <td>Test case</td>
- <td>AjcTest</td>
-</tr>
-<tr><td> <code>&lt;compile&gt;</code></td>
- <td>Initial (batch) compile run</td>
- <td>CompilerRun</td>
-</tr>
-<tr><td> <code>&lt;inc-compile&gt;</code></td>
- <td>Incremental re-compile</td>
- <td>IncCompilerRun</td>
-</tr>
-<tr><td> <code>&lt;run&gt;</code></td>
- <td>Run class</td>
- <td>JavaRun</td>
-</tr>
-</table>
-<!--
- general harness (Harness)
- subclass feature harness (FeatureHarness)
- <ajc-test> run component (AjcTest)
- <compile> {sub-} run component (CompilerRun)
- <inc-compile> {sub-} run component (IncCompilerRun)
- <run> {sub-} run component (JavaRun)
- ...
--->
-<p/>
-The compiler used is the AspectJ compiler <code>ajc</code>
-(optionally as wrapped by the Ant task or AJDE API's), but
-in principle any compiler accepting similar options can be
-used.
-<p/>
-To run from the command-line, use
-<code>Harness.main(String[])</code>.
-To run programmatically, use <code>Harness.makeHarness()</code>.
-<code>Harness.runMain(String[], List)</code> takes arguments that
-each component in the chain may accept and interpret, so
-you can modify how the tests run by specifying the following
-arguments on the harness command line:
-<p/>
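For instance, a minimal programmatic invocation might look like the sketch below (the option set and the suite path ../tests/ajcTests.xml are illustrative only); the table that follows lists the options each component accepts.

import org.aspectj.testing.drivers.Harness;

public class RunAjcTests {
    public static void main(String[] args) throws Exception {
        String[] harnessArgs = {
            "-logMinFail",                                   // log each failing test with minimal detail
            "-hideStreams",                                  // alias: hide compile and run err/out streams
            "-ajctestSkipKeywords=knownLimitation,purejava", // passed down to test selection
            "../tests/ajcTests.xml"                          // ajcTest-compliant suite file
        };
        Harness.makeHarness().runMain(harnessArgs, null);    // null: do not collect per-suite results
    }
}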
-<table cellpadding="1" border="1">
-<tr><th>Component</th><th>Options</th></tr>
-
-<tr><td rowspan="6" valign="top">Harness
- <p>suite files, harness verbosity, temp files, option variants
- </p></td></tr>
- <tr><td><u>suite files</u>: ajcTest-compliant .txt or .xml files are accepted.
- <!-- XXX link to ajcTestSuite.dtd and .txt definitions -->
- </td></tr>
- <tr><td><u><code>-verboseHarness</code></u>,
- <u><code>-quietHarness</code></u>:
- Log accepted options and skipped tests,
- or do not print even info messages.
- </td></tr>
- <tr><td><u><code>-keepTemp</code></u>: Normally the harness saves temp files until
- the end of the run, and deletes them. If you abort the run or specify
- <code>-keepTemp</code>, then temporary (sandbox) directories will remain for analysis.
- In either case, the file system accumulates all temporary directories
- and files used for a given harness run; for the <code>ajcTests.xml</code>
- suite, this runs into thousands of files.
- </td></tr>
- <tr><td><u><code>-killTemp</code></u>: The opposite of <code>-keepTemp</code>,
- this causes the harness to delete temporary (sandbox) directories at
- the end of each test run.
- In this case, the file system only accumulates files for
- the current test.
- </td></tr>
- <tr><td><u>*- variants</u>: Options with a trailing "-" cause two sets of
- option lists to be produced, one with and one without the corresponding
- option. E.g., "-emacssym-" will run the suite twice, once with and
- once without the "-emacssym" flag.
- That means if you use this on each of three options, you will
- get 8 variant sets (1 with no options, 1 with all 3 options,
- 3 with 2 options, and 3 with 1 option).
- </td></tr>
-
-<tr><td rowspan="5" valign="top">FeatureHarness
- <p>output and logging options
- </p></td></tr>
- <tr><td><u>tracing</u>:
- <code>-progressDots</code> will print "." for every passed
- test completed and "!" for every test completed but not passed.
- <code>-traceTests</code> will print a one-line summary for each test
- of the time and space taken and whether the test passed.
- <code>-traceTestsMin</code> will print only the test and whether it passed.
- <code>-baseline</code> is an alias for
- <code>-traceTestsMin</code>,
- <code>-hideStreams</code>, and
- <code>!eclipse</code>, used to emit test results in a form
- comparable by <code>org.aspectj.testing.util.TestDiffs</code>,
- or usable to select tests by title for options like
- <code>-ajctestTitleList</code>.
- </td></tr>
-
- <tr><td><u>output</u>: <code>-hide{Compiler|Run}Streams</code> will prevent output and
- error streams from being printed to System.err and System.out,
- optionally only for run or compile steps.
- </td></tr>
- <tr><td><u>logging</u>:
- Log variants take the form <code>-log{Min|Xml}[Fail|Pass|All]</code>.
- The suffix {All|Pass|Fail} selects all tests or only passing or failing tests.
- The infix {Min} means to log with minimal information, typically only any
- fail messages.
- The infix {Xml} means to log the XML form of the test definition, so that
- you can inspect the input or re-run arbitrary tests.
- (You can also re-run a set of tests using keywords
- (e.g., "<code>-ajctestRequireKeywords=...</code>") or using titles
- (e.g., "<code>-ajctestTitleFailList=ajcTestResults.txt</code>").)
- Finally, the experimental option <code>-XlogPublicType</code> will
- log the XML test definition for
- any test run that emits any ERROR messages containing the text "public type".
- </td></tr>
- <tr><td><u>interaction of output streams and logging</u>:
- Streams will be emitted in real-time,
- <em>before</em> the test is logged, unless streams are hidden.
- When logging in normal (non-Min or -XML) form, the log will emit the streams
- with the test report, so e.g., you can use -hideStreams -logFail to
- hide streams for passing tests but emit them for failing tests
- in the context of the log.
- </td></tr>
-
-<tr><td rowspan="5" valign="top">AjcTest
- <p>selection options for keywords, bugID (PR), or title (description)
- </p></td></tr>
- <tr><td><u>keywords</u>: <code>-ajctest[Require|Skip]Keywords=one{,two}</code>
- will either require or skip a test that has one of the
- specified keywords.
- </td></tr>
- <tr><td><u>Bugs</u>: <code>-ajctestPR=101{,102}</code>
- will require that a test have one of the listed bug id's.
- </td></tr>
- <tr><td><u>title</u>:
- <code>"-ajctestTitleContains=one,two"</code>
- will require that the title (description) of a test contain
- one of the specified substrings, here either "one" or "two".
- Use this to select a few tests you know generally.
- <br/>
- <code>"-ajctestTitleList=first title\, in theory, second title"</code>
- will require that the title (description) of a test be
- exactly "first title, in theory" or "second title".
- The entire option must be one argument on the command line.
- Use this when working with just a few specific tests.
- <br/>
- <code>"-ajctestTitleList=../tests/ajcTestResults.txt"</code>
- will require that the title (description) of a test be
- equal to one listed in <code>../tests/ajcTestResults.txt</code>
- as a line of the form "[PASS|FAIL] {title}(.."
- (This form is emitted by the <code>-traceTestsMin</code> option).
- This option only differs from the prior in that the parameter
- is a valid file to read.
- Use this to re-run a large set of tests.
- <br/>
- <code>"-ajctestTitleFailList=../tests/ajcTestResults.txt"</code>
- is the same as the <code>-ajctestTitleList={file}</code> variant,
- except that only results prefixed "FAIL" are included.
- Use this to re-run only the tests that failed from a large set.
- </td></tr>
-
- <tr><td><u>Combinations</u>: all selectors are applied to each test,
- so all tests selected will comply with all constraints.
- Specifying lists within a particular constraint will match
- a union of tests for that constraint
- (e.g., all tests with bug id's 101 or 102),
- but there is no way to get a union of constraints
- (e.g., all tests with bug id's 101 or 102 <em>or</em>
- with keywords pure-java or knownLimitation).
- However, <code>-ajctestSkipKeywords=...</code> can return all
- tests without the specified keywords, so it can form unions like
- "all tests without the knownLimitation keyword, but with
- bug id's 101 or 102".
- Title lists can work similarly. E.g., to run the failed
- incremental tests from ajcTestResults.txt, specify
- <code>-ajctestTitleFailList=../tests/ajcTestResults.txt</code>
- <code>-ajctestRequireKeywords=incremental-test</code>.
- </td></tr>
-
-<tr><td rowspan="6" valign="top">CompilerRun
-<p>compiler options and side-effects
- </p></td></tr>
- <tr><td><u>supported options</u>:
- The command line passed to the compiler by <code>CompilerRun</code>
- is composed of entries derived from the <code>&lt;compile&gt;</code>
- attributes and recognized from the harness command line.
- <code>&lt;compile&gt;</code> has specific attributes like
- <code>files</code>,
- <code>argfiles</code>,
- <code>classpath</code> and
- <code>sourceroot</code>
- which translate directly to their counterparts.
- The output option <code>-d</code> is defined by <code>CompilerRun</code> and
- may not be specified (and <code>-outjar</code> is not supported).
- Most other compiler options are defined in
- <code>CompilerRun.Spec.CRSOptions</code> and may be specified
- on the harness command-line
- or in the <code>options</code> attribute of
- <code><compile></code>.
- In the <code>options</code> attribute, each argument is comma-delimited,
- so an option with an argument would look like
- <code><compile options="-source,1.4" ...></code>.
- If options collide, duplicates
- can be resolved using option dominance (below).
- </td></tr>
- <tr><td><u>compiler selectors</u>:
- Use <code>-ajc</code> or <code>-eclipse</code> to select the old
- (ajc 1.0) or new (eajc 1.1) compilers.
- Note that the old compiler is not
- available in the CVS source tree at eclipse.org.
- Use <code>-ajdeCompiler</code> to run a wrapper around the
- AJDE interface
- and <code>-ajctaskCompiler</code> to run a wrapper around the
- AjcTask (Ant task) interface.
- </td></tr>
- <tr><td><u>option dominance <code>[-|!|^]</code></u>:
- Some tests require or prohibit certain options;
- likewise, sometimes you want to force all tests
- to run with or without an option specified on the command-line,
- regardless of its setting in the <code><compile options=".." ...></code>
- attribute.
- For this reason an option may be specified in the options attribute
- or on the harness command-line as
- <code>-option</code>,
- <code>!option</code>, or
- <code>^option</code>.
- <ul>
- <li><u>- set</u>: If the leading character of an option is "-", then it is set unless forced-off.</li>
- <li><u>^ force-off</u>: If the leading character of an option is "^", then it is forced off.
- Any other matching option will be removed.</li>
- <li><u>! force-on</u>: If the leading character of an option is "!", then it is forced on.
- Any other non-force-on matching option will be removed.</li>
- <li><u>force conflict</u>: If there are two matching force-on options, the test is skipped.</li>
- <li><u>related options</u>: Two options match if they are the same or
- if they are in the same family. For example, <code>-ajc</code> and
- <code>-eclipse</code> both select a compiler, and <code>-source 1.4</code>
- and <code>-source 1.3</code> both set the source level.
- <br/>
- </li>
- </ul>
- </td></tr>
- <tr><td><u>auto-skip</u>: After combining global and local options, there may be
- conflicting or impossible options, which cause the test to be skipped:
- <ul>
- <li><u>semantic conflicts</u>: two options may conflict in meaning
- - e.g., <code>-lenient</code> and <code>-strict</code></li>
- <li><u>impossible option</u>: It may not be possible in the current configuration to
- implement an option - e.g., <code>-usejavac</code> or <code>-eclipse</code>
- when javac or the eclipse implementation is not on the classpath
- <br/></li>
- </ul>
- </td></tr>
-
- <tr><td><u>source searching</u>: Given <code>-seek:{literal}</code>,
- as a side-effect,
- <code>CompilerRun</code> will search source files for {literal},
- emitting for each instance an INFO message of the form:
- <tt>found: {file}:{line}:{column}</tt>
- (Note that the harness does not display INFO messages unless <tt>-verboseHarness</tt>
- or <tt>-loud</tt> is used.)
- </td></tr>
-
-
- <tr><td rowspan="2" valign="top">JavaRun
- <p>Options and forking</p></td>
- <td><u>options</u>: options specified in the test are passed
- to the main method as they would be on the command-line.
- No options passed to the harness are passed through to
- the main class.
- </td></tr>
- <tr><td><u>forking</u>:
- Forking is useful to run in a different version of Java
- than can be supported by the harness (i.e., some 1.1 flavor);
- it's very time-consuming otherwise.
- Currently forking is only controllable through system properties
- of the invoking vm (defined in JavaRun.java):
- <ul>
- <li><u>javarun.fork</u>: anything to run in a new vm.
- </li>
- <li><u>javarun.java</u>: path to the java executable to run
- (suffix included). If not supplied, the harness tries to
- find the java that invoked the harness.
- </li>
- <li><u>javarun.java.home</u>: the value of the JAVA_HOME
- environment variable, if it needs to be set.
- </li>
- <li><u>javarun.bootclasspath</u>: this is prepended to the
- run classpath. Multiple entries must be separated by
- the system-dependent path separator.
- </li>
- <li><u>javarun.vmargs</u>: this is added to the fork command-line
- right after java. Multiple entries must be separated by a comma
- (and the whole thing should be one parameter), e.g.,
- <code>-Djavarun.vmargs=-Dname=value,-Dname2="value 2"</code>
- </li>
- </ul>
- </td></tr>
-</table>
-<br/>
-Following are some sample configurations:
-<ul>
-<li><code>java {harness} -hideStreams {suiteFile}</code>
- <p>Use this to output only a 1-line summary of the test results
- (tests skipped, incomplete, failed, passed).<br/></p>
- </li>
-
-<li><code>java {harness} -hideStreams -traceTestsMin {suiteFile} > results.txt</code>
- <p>This writes to results.txt one line [PASS|FAIL] per test, plus a
- 1-line summary of the test results.<br/></p>
- </li>
-
-<li><code>java {harness} -logFail {suiteFile} -ajctestTitleFailList=results.txt</code>
- <p>This re-runs any test that failed from the "results.txt" run,
- verbosely logging any fails.<br/></p>
- </li>
-
-<li><code>java {harness} -hideStreams -logMinFail {suiteFile}</code>
- <p>Use this when running tests mainly to see if they pass or
- if the failure messages are typically enough information
- to indicate why the test is failing. It produces only minimal
- output for failed tests.<br/></p>
- </li>
-
-<li><code>java {harness} -hideStreams -verboseHarness -logFail {suiteFile}</code>
- <p>When it's not clear at first glance why a test is failing, before
- looking at the test code you can run it and print any harness or test
- setup failures and all the associated messages from the test components.<br/></p>
- </li>
-
-<li><code>java {harness} -hideStreams -usejavac- -ajc -Xlint- {suiteFile}</code>
- <p>Because of the trailing '-' on two of the options,
- this would do four complete runs with the old (Ajc 1.0) compiler: one with
- no options, one with -usejavac, one with -Xlint, and one with both.<br/></p>
- </li>
-
-
-<li><code>java {harness} -ajctestPR=101,102 -Xlint- ^usejavac !eclipse {suiteFile}</code>
- <p>Run any tests associated with bugs 101 and 102, with and without -Xlint,
- forcing off -usejavac and forcing the use of the new eclipse-based compiler.<br/></p>
- </li>
-
-</ul>
-
-If you have a set of options you use often, you can define a single-word
-option alias for it; see <code>Harness.optionAliases</code>.
-
-<br/><u>Configuration</u>: Most tests use the library jars in
-<code>modules/lib/test</code>, defined in
-<code>org.aspectj.testing.harness.bridge.Globals</code>.
-Normally the harness finds these by relative path
-<code>../lib/tests/*.jar</code>, which works whenever the tests are
-run from a peer module directory. When running tests elsewhere,
-define the system property <code>harness.libdir</code> - e.g.,
-<pre>
- $ cd aspectj/tests
- $ java -Dharness.libdir=../modules/lib/test ...
-</pre>
-
-<br/><u>Forking</u>:
-The harness must be run in a compiler-compatible VM, and the
-compiler steps run in-process.
-However, the java steps can be run in forked mode, which is useful
-when compiling for a VM which can't run the compiler.
-To compile for a different target VM could require
-setting the options for bootclasspath, target, and source.
-To run the harness so that any <run.. tasks run in a
-separate vm, do something like this:
-<pre>
- java -Djavarun.java=d:\jdk1.1.8\bin\java.exe \
- -Djavarun.bootclasspath=d:\jdk1.1.8\lib\classes.zip \
- -Djavarun.java.home=d:\jdk1.1.8 \
- -Djavarun.fork=true \
- -jar ../aj-build/jars/testing-drivers-all.jar \
- ajcTests.xml -logFail
-</pre>
-
-Here <code>CompilerRun</code> would add the bootclasspath as such when compiling.
-JavaRun would fork using the 1.1 vm and prepend the bootclasspath
-to the classpath, with an effect like these commands
-(ignoring the line splitting in the classpath):
-<pre>
- set JAVA_HOME=d:\jdk1.1.8
- d:\jdk1.1.8\bin\java.exe \
- -classpath "d:\jdk1.1.8\lib\classes.zip;
- d:\aspectj-src\lib\test\testing-client.jar;
- d:\aspectj-src\lib\test\aspectjrt.jar;
- c:\TEMP\sandbox7wers\classes"
- {mainClass} {option..}
-</pre>
-
-
-</body>
-</html>
--- /dev/null
+/* *******************************************************************
+ * Copyright (c) 2002 Palo Alto Research Center, Incorporated (PARC),
+ * 2003 Contributors.
+ * All rights reserved.
+ * This program and the accompanying materials are made available
+ * under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Xerox/PARC initial implementation
+ * Wes Isberg 2003 changes.
+ * ******************************************************************/
+
+package org.aspectj.testing.drivers;
+
+import org.aspectj.bridge.IMessage;
+import org.aspectj.bridge.IMessageHolder;
+import org.aspectj.bridge.MessageHandler;
+import org.aspectj.bridge.MessageUtil;
+import org.aspectj.testing.harness.bridge.AbstractRunSpec;
+import org.aspectj.testing.harness.bridge.AjcTest;
+import org.aspectj.testing.harness.bridge.CompilerRun;
+import org.aspectj.testing.harness.bridge.FlatSuiteReader;
+import org.aspectj.testing.harness.bridge.IncCompilerRun;
+import org.aspectj.testing.harness.bridge.JavaRun;
+import org.aspectj.testing.harness.bridge.RunSpecIterator;
+import org.aspectj.testing.harness.bridge.Sandbox;
+import org.aspectj.testing.harness.bridge.Validator;
+import org.aspectj.testing.run.IRun;
+import org.aspectj.testing.run.IRunIterator;
+import org.aspectj.testing.run.IRunListener;
+import org.aspectj.testing.run.IRunStatus;
+import org.aspectj.testing.run.IRunValidator;
+import org.aspectj.testing.run.RunListener;
+import org.aspectj.testing.run.RunStatus;
+import org.aspectj.testing.run.RunValidator;
+import org.aspectj.testing.run.Runner;
+import org.aspectj.testing.util.BridgeUtil;
+import org.aspectj.testing.util.RunUtils;
+import org.aspectj.testing.util.StreamsHandler;
+import org.aspectj.testing.util.StreamsHandler.Result;
+import org.aspectj.testing.xml.AjcSpecXmlReader;
+import org.aspectj.testing.xml.XMLWriter;
+import org.aspectj.util.FileUtil;
+import org.aspectj.util.LangUtil;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.io.PrintWriter;
+import java.text.DecimalFormat;
+import java.text.SimpleDateFormat;
+import java.util.*;
+
+/**
+ * Test harness for running AjcTest.Suite test suites.
+ * This can be easily extended by subclassing.
+ * <ul>
+ * <li>template algorithms for reading arguments, printing syntax,
+ * reading suites, and reporting results all
+ * delegate to methods that subclasses can override to support
+ * additional arguments or different reporting.</li>
+ * <li>implement arbitrary features as IRunListeners</li>
+ * <li>support single-option aliases to any number of single-options </li>
+ * </ul>
+ * See {@link #report(IRunStatus, int, int, long)} for an explanation of test result
+ * categories.
+ */
+public class Harness {
+ /**
+ * Spaces up to the width that an option should take in the syntax,
+ * including the two-space leader
+ */
+ protected static final String SYNTAX_PAD = " ";
+ protected static final String OPTION_DELIM = ";";
+ private static final String JAVA_VERSION;
+ private static final String ASPECTJ_VERSION;
+ static {
+ String version = "UNKNOWN";
+ try { version = System.getProperty("java.version", "UNKNOWN"); }
+ catch (Throwable t) {}
+ JAVA_VERSION = version;
+
+ version = "UNKNOWN";
+ try {
+ Class c = Class.forName("org.aspectj.bridge.Version");
+ version = (String) c.getField("text").get(null);
+ } catch (Throwable t) {
+ // ignore
+ }
+ ASPECTJ_VERSION = version;
+ }
+
+ /** factory for the subclass currently anointed as default */
+ public static Harness makeHarness() {
+ return new FeatureHarness();
+ }
+
+ /** @param args String[] like runMain(String[]) args */
+ public static void main(String[] args) throws Exception {
+ if (LangUtil.isEmpty(args)) {
+ File argFile = new File("HarnessArgs.txt");
+ if (argFile.canRead()) {
+ args = readArgs(argFile);
+ } else {
+ args = new String[] { "-help" };
+ }
+ }
+ makeHarness().runMain(args, null);
+ }
+
+ /**
+ * Get known option aliases.
+ * Subclasses may add new aliases, where the key is the alias option,
+ * and the value is a String of target options delimited by OPTION_DELIM.
+ * @return Properties with feature aliases or null
+ */
+ protected static Properties getOptionAliases() {
+ if (null == optionAliases) {
+ optionAliases = new Properties();
+ // XXX load from **OptionAliases.properties
+ }
+ return optionAliases;
+ }
+
+ /**
+ * Read argFile contents into String[],
+ * delimiting at any whitespace
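+ * (e.g., a file containing "-logFail ajcTests.xml" yields those two arguments)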
+ */
+ private static String[] readArgs(File argFile) {
+ ArrayList args = new ArrayList();
+// int lineNum = 0;
+
+ try {
+ BufferedReader stream =
+ new BufferedReader(new FileReader(argFile));
+ String line;
+ while (null != (line = stream.readLine())) {
+ StringTokenizer st = new StringTokenizer(line);
+ while (st.hasMoreTokens()) {
+ args.add(st.nextToken());
+ }
+ }
+ } catch (IOException e) {
+ e.printStackTrace(System.err);
+ }
+ return (String[]) args.toArray(new String[0]);
+ }
+
+ /** aliases key="option" value="option{;option}" */
+ private static Properties optionAliases;
+
+ /** be extra noisy if true */
+ private boolean verboseHarness;
+
+ /** be extra quiet if true */
+ private boolean quietHarness;
+
+ /** just don't say anything! */
+ protected boolean silentHarness;
+
+ /** map of feature names to features */
+ private HashMap features;
+
+ /** if true, do not delete temporary files. */
+ private boolean keepTemp;
+
+ /** if true, delete temporary files as each test completes. */
+ private boolean killTemp;
+
+ /** if true, then log results in report(..) when done */
+ private boolean logResults;
+
+ /** if true and there were failures, do System.exit({numFailures})*/
+ private boolean exitOnFailure;
+
+ protected Harness() {
+ features = new HashMap();
+ }
+
+
+ /**
+ * Entry point for a test.
+ * This reads in the arguments,
+ * creates the test suite(s) from the input file(s),
+ * and for each suite does setup, run, report, and cleanup.
+ * When arguments are read, any option ending with "-" causes
+ * option variants, a set of args with and another without the
+ * option. See {@link LangUtil.optionVariants(String[])} for
+ * more details.
+ * @param args the String[] for the test suite - use -help to get options,
+ * and use "-" suffixes for variants.
+ * @param resultList List for IRunStatus results - ignored if null
+ */
+ public void runMain(String[] args, List resultList) {
+ LangUtil.throwIaxIfFalse(!LangUtil.isEmpty(args), "empty args");
+ // read arguments
+ final ArrayList globals = new ArrayList();
+ final ArrayList files = new ArrayList();
+ final LinkedList argList = new LinkedList();
+ argList.addAll(Arrays.asList(args));
+ for (int i = 0; i < argList.size(); i++) {
+ String arg = (String) argList.get(i);
+ List aliases = aliasOptions(arg);
+ if (!LangUtil.isEmpty(aliases)) {
+ argList.remove(i);
+ argList.addAll(i, aliases);
+ i--;
+ continue;
+ }
+ if ("-help".equals(arg)) {
+ logln("java " + Harness.class.getName() + " {option|suiteFile}..");
+ printSyntax(getLogStream());
+ return;
+ } else if (isSuiteFile(arg)) {
+ files.add(arg);
+ } else if (!acceptOption(arg)) {
+ globals.add(arg);
+ } // else our options absorbed
+ }
+ if (0 == files.size()) {
+ logln("## Error reading arguments: at least 1 suite file required");
+ logln("java " + Harness.class.getName() + " {option|suiteFile}..");
+ printSyntax(getLogStream());
+ return;
+ }
+ String[] globalOptions = (String[]) globals.toArray(new String[0]);
+ String[][] globalOptionVariants = optionVariants(globalOptions);
+ AbstractRunSpec.RT runtime = new AbstractRunSpec.RT();
+ if (verboseHarness) {
+ runtime.setVerbose(true);
+ }
+
+ // run suites read from each file
+ AjcTest.Suite.Spec spec;
+ for (Iterator iter = files.iterator(); iter.hasNext();) {
+ File suiteFile = new File((String) iter.next());
+ if (!suiteFile.canRead()) {
+ logln("runMain(..) cannot read file: " + suiteFile);
+ continue;
+ }
+ if (null == (spec = readSuite(suiteFile))) {
+ logln("runMain(..) cannot read suite from file: " + suiteFile);
+ continue;
+ }
+
+ MessageHandler holder = new MessageHandler();
+ for (int i = 0; i < globalOptionVariants.length; i++) {
+ runtime.setOptions(globalOptionVariants[i]);
+ holder.init();
+ boolean skip = !spec.adoptParentValues(runtime, holder);
+ // awful/brittle assumption about number of skips == number of skip messages
+ final List skipList = MessageUtil.getMessages(holder, IMessage.INFO, false, "skip");
+ if ((verboseHarness || skip || (0 < skipList.size()))) {
+ final List curArgs = Arrays.asList(globalOptionVariants[i]);
+ logln("runMain(" + suiteFile + ", " + curArgs + ")");
+ if (verboseHarness) {
+ String format = "yyyy.MM.dd G 'at' hh:mm:ss a zzz";
+ SimpleDateFormat formatter = new SimpleDateFormat (format);
+ String date = formatter.format(new Date());
+ logln("test date: " + date);
+ logln("harness features: " + listFeatureNames());
+ logln("Java version: " + JAVA_VERSION);
+ logln("AspectJ version: " + ASPECTJ_VERSION);
+ }
+ if (!(quietHarness || silentHarness) && holder.hasAnyMessage(null, true)) {
+ MessageUtil.print(getLogStream(), holder, "skip - ");
+ MessageUtil.printMessageCounts(getLogStream(), holder, "skip - ");
+ }
+ }
+ if (!skip) {
+ doStartSuite(suiteFile);
+ long elapsed = 0;
+ RunResult result = null;
+ try {
+ final long startTime = System.currentTimeMillis();
+ result = run(spec);
+ if (null != resultList) {
+ resultList.add(result);
+ }
+ elapsed = System.currentTimeMillis() - startTime;
+ report(result.status, skipList.size(), result.numIncomplete, elapsed);
+ } finally {
+ doEndSuite(suiteFile,elapsed);
+ }
+ if (exitOnFailure) {
+ int numFailures = RunUtils.numFailures(result.status, true);
+ if (0 < numFailures) {
+ System.exit(numFailures);
+ }
+ Object value = result.status.getResult();
+ if ((value instanceof Boolean)
+ && !((Boolean) value).booleanValue()) {
+ System.exit(-1);
+ }
+ }
+ }
+ }
+ }
+ }
+
+
+ /**
+ * Tell all TestCompleteListener features that a test suite has completed
+ * @param suiteFile
+ * @param elapsed
+ */
+ private void doEndSuite(File suiteFile, long elapsed) {
+ Collection c = features.values();
+ for (Iterator iter = c.iterator(); iter.hasNext();) {
+ Feature element = (Feature) iter.next();
+ if (element.listener instanceof TestCompleteListener) {
+ ((TestCompleteListener)element.listener).doEndSuite(suiteFile,elapsed);
+ }
+ }
+ }
+ /**
+ * Generate variants of String[] options by creating an extra set for
+ * each option that ends with "-". If none end with "-", then an
+ * array equal to <code>new String[][] { options }</code> is returned;
+ * if one ends with "-", then two sets are returned,
+ * three causes eight sets, etc.
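+ * For example, the input {"-a-", "-b"} yields the two variants
+ * {"-b"} and {"-a", "-b"}.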
+ * @return String[][] with each option set.
+ * @throws IllegalArgumentException if any option is null or empty.
+ */
+ public static String[][] optionVariants(String[] options) {
+ if ((null == options) || (0 == options.length)) {
+ return new String[][] { new String[0]};
+ }
+ // be nice, don't stomp input
+ String[] temp = new String[options.length];
+ System.arraycopy(options, 0, temp, 0, temp.length);
+ options = temp;
+ boolean[] dup = new boolean[options.length];
+ int numDups = 0;
+
+ for (int i = 0; i < options.length; i++) {
+ String option = options[i];
+ if (LangUtil.isEmpty(option)) {
+ throw new IllegalArgumentException("empty option at " + i);
+ }
+ if (option.endsWith("-")) {
+ options[i] = option.substring(0, option.length()-1);
+ dup[i] = true;
+ numDups++;
+ }
+ }
+ final String[] NONE = new String[0];
+ final int variants = exp(2, numDups);
+ final String[][] result = new String[variants][];
+ // variant is a bitmap wrt doing extra value when dup[k]=true
+ for (int variant = 0; variant < variants; variant++) {
+ ArrayList next = new ArrayList();
+ int nextOption = 0;
+ for (int k = 0; k < options.length; k++) {
+ if (!dup[k] || (0 != (variant & (1 << (nextOption++))))) {
+ next.add(options[k]);
+ }
+ }
+ result[variant] = (String[]) next.toArray(NONE);
+ }
+ return result;
+ }
+
+ private static int exp(int base, int power) { // not in Math?
+ if (0 > power) {
+ throw new IllegalArgumentException("negative power: " + power);
+ }
+ int result = 1;
+ while (0 < power--) {
+ result *= base;
+ }
+ return result;
+ }
+
+ /**
+ * Tell all TestCompleteListener features that a test suite is starting.
+ * @param suiteFile the suite file about to be run
+ */
+ private void doStartSuite(File suiteFile) {
+ Collection c = features.values();
+ for (Iterator iter = c.iterator(); iter.hasNext();) {
+ Feature element = (Feature) iter.next();
+ if (element.listener instanceof TestCompleteListener) {
+ ((TestCompleteListener)element.listener).doStartSuite(suiteFile);
+ }
+ }
+ }
+
+ /** Run the test suite specified by the spec */
+ protected RunResult run(AjcTest.Suite.Spec spec) {
+ LangUtil.throwIaxIfNull(spec, "spec");
+ /*
+ * For each run, initialize the runner and validator,
+ * create a new set of IRun{Iterator} tests,
+ * and run them.
+ * Delete all temp files when done.
+ */
+ Runner runner = new Runner();
+ if (0 != features.size()) {
+ for (Iterator iter = features.entrySet().iterator(); iter.hasNext();) {
+ Feature feature = (Feature) ((Map.Entry) iter.next()).getValue();
+ runner.registerListener(feature.clazz, feature.listener);
+ }
+ }
+ IMessageHolder holder = new MessageHandler();
+ int numIncomplete = 0;
+ RunStatus status = new RunStatus(holder, runner);
+ status.setIdentifier(spec);
+ // validator is used for all setup in entire tree...
+ Validator validator = new Validator(status);
+ if (!killTemp) {
+ validator.lock(this);
+ }
+ Sandbox sandbox = null;
+ try {
+ sandbox = new Sandbox(spec.getSuiteDirFile(), validator);
+ IRunIterator tests = spec.makeRunIterator(sandbox, validator);
+ runner.runIterator(tests, status, null);
+ if (tests instanceof RunSpecIterator) {
+ numIncomplete = ((RunSpecIterator) tests).getNumIncomplete();
+ }
+ } finally {
+ if (!keepTemp) {
+ if (!killTemp) {
+ validator.unlock(this);
+ }
+ validator.deleteTempFiles(verboseHarness);
+ }
+ }
+ return new RunResult(status, numIncomplete);
+ }
+
+ /**
+ * Report the results of a test run after it is completed.
+ * Clients should be able to identify the number of:
+ * <ul>
+ * <li>tests run and passed</li>
+ * <li>tests failed, i.e., run and not passed (fail, error, etc.)</li>
+ * <li>tests incomplete, i.e., test definition read but test run setup failed</li>
+ * <li>tests skipped, i.e., test definition read and found incompatible with
+ * the current configuration.</li>
+ * </ul>
+ *
+ * @param status returned from the run
+ * @param numSkipped int tests that were skipped because of
+ * configuration incompatibilities
+ * @param numIncomplete int tests that failed during setup,
+ * usually indicating a test definition or configuration error.
+ * @param msElapsed elapsed time in milliseconds
+ * */
+ protected void report(IRunStatus status, int numSkipped, int numIncomplete,
+ long msElapsed ) {
+ if (logResults) {
+ RunUtils.AJCSUITE_PRINTER.printRunStatus(getLogStream(), status);
+ } else if (!(quietHarness || silentHarness) && (0 < status.numMessages(null, true))) {
+ if (!silentHarness) {
+ MessageUtil.print(getLogStream(), status, "");
+ }
+ }
+
+ logln(BridgeUtil.childString(status, numSkipped, numIncomplete)
+ + " " + (msElapsed/1000) + " seconds");
+
+ }
+
+ // --------------- delegate methods
+ protected void logln(String s) {
+ if (!silentHarness) {
+ getLogStream().println(s);
+ }
+ }
+
+ protected PrintStream getLogStream() {
+ return System.out;
+ }
+
+ protected boolean isSuiteFile(String arg) {
+ return ((null != arg)
+ && (arg.endsWith(".txt") || arg.endsWith(".xml"))
+ && new File(arg).canRead());
+ }
+
+ /**
+ * Get the options that the input option is an alias for.
+ * Subclasses may add options directly to the getFeatureAliases result
+ * or override this.
+ * @return null if the input is not an alias for other options,
+ * or a non-empty List (String) of options that this option is an alias for
+ */
+ protected List aliasOptions(String option) {
+ Properties aliases = Harness.getOptionAliases();
+ if (null != aliases) {
+ String args = aliases.getProperty(option);
+ if (!LangUtil.isEmpty(args)) {
+ return LangUtil.anySplit(args, OPTION_DELIM);
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Read and implement any of our options.
+ * Options other than this and suite files will be
+ * passed down as parent options through the test spec hierarchy.
+ * Subclasses override this to implement new options.
+ */
+ protected boolean acceptOption(String option) {
+// boolean result = false;
+ if (LangUtil.isEmpty(option)) {
+ return true; // skip bad input
+ } else if ("-verboseHarness".equals(option)) {
+ verboseHarness = true;
+ } else if ("-quietHarness".equals(option)) {
+ quietHarness = true;
+ } else if ("-silentHarness".equals(option)) {
+ silentHarness = true;
+ } else if ("-keepTemp".equals(option)) {
+ keepTemp = true;
+ } else if ("-killTemp".equals(option)) {
+ killTemp = true;
+ } else if ("-logResults".equals(option)) {
+ logResults = true;
+ } else if ("-exitOnFailure".equals(option)) {
+ exitOnFailure = true;
+ } else {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Read a test suite file.
+ * This implementation knows how to read .txt and .xml files
+ * and logs any errors.
+ * Subclasses override this to read new kinds of suites.
+ * @return null if unable to read (logging errors) or AjcTest.Suite.Spec otherwise
+ */
+ protected AjcTest.Suite.Spec readSuite(File suiteFile) {
+ if (null != suiteFile) {
+ String path = suiteFile.getPath();
+ try {
+ if (path.endsWith(".xml")) {
+ return AjcSpecXmlReader.getReader().readAjcSuite(suiteFile);
+ } else if (path.endsWith(".txt")) {
+ return FlatSuiteReader.ME.readSuite(suiteFile);
+ } else {
+ logln("unrecognized extension? " + path);
+ }
+ } catch (IOException e) {
+ e.printStackTrace(getLogStream());
+ }
+ }
+ return null;
+ }
+
+ /** Add feature to take effect during the next runMain(..) invocation.
+ * @param feature the Feature to add, using feature.name as key.
+ */
+ protected void addFeature(Feature feature) {
+ if (null != feature) {
+ features.put(feature.name, feature);
+ }
+ }
+
+ /** remove feature by name (same as feature.name) */
+ protected void removeFeature(String name) {
+ if (!LangUtil.isEmpty(name)) {
+ features.remove(name);
+ }
+ }
+
+ /** @return unmodifiable Set of feature names */
+ protected Set listFeatureNames() {
+ return Collections.unmodifiableSet(features.keySet());
+ }
+
+ /** print detail message for syntax of main(String[]) command-line */
+ protected void printSyntax(PrintStream out) {
+ out.println(" {??} unrecognized options are used as test spec globals");
+ out.println(" -help print this help message");
+ out.println(" -verboseHarness harness components log verbosely");
+ out.println(" -quietHarness harness components suppress logging");
+ out.println(" -keepTemp do not delete temporary files");
+ out.println(" -logResults log results at end, verbosely if fail");
+ out.println(" -exitOnFailure do System.exit({num-failures}) if suite fails");
+ out.println(" {suiteFile}.xml.. specify test suite XML file");
+ out.println(" {suiteFile}.txt.. specify test suite .txt file (deprecated)");
+ }
+
+ /** print known aliases at the end of the syntax message */
+ protected void printAliases(PrintStream out) {
+ LangUtil.throwIaxIfNull(out, "out");
+ Properties props = getOptionAliases();
+ if (null == props) {
+ return;
+ }
+ int pdLength = SYNTAX_PAD.length();
+ Set entries = props.entrySet();
+ for (Iterator iter = entries.iterator(); iter.hasNext();) {
+ Map.Entry entry = (Map.Entry) iter.next();
+ String alias = " " + (String) entry.getKey();
+ int buf = pdLength - alias.length();
+ if (0 < buf) {
+ alias += SYNTAX_PAD.substring(0, buf);
+ } else {
+ alias += " ";
+ }
+ out.println(alias + entry.getValue());
+ }
+ }
+
+ /** result struct for run(AjcTest.Spec) */
+ public static class RunResult {
+ public final IRunStatus status;
+ public final int numIncomplete;
+ public RunResult(IRunStatus status, int numIncomplete) {
+ this.status = status;
+ this.numIncomplete = numIncomplete;
+ }
+ }
+ /** feature implemented as named IRunIterator/IRun association */
+ public static class Feature {
+ /** never null, always assignable to IRun */
+ public final Class clazz;
+
+ /** never null */
+ public final IRunListener listener;
+
+ /** never null or empty */
+ public final String name;
+
+ /** @throws IllegalArgumentException if any is null/empty or clazz is
+ * not assignable to IRun
+ */
+ public Feature(String name, Class clazz, IRunListener listener) {
+ LangUtil.throwIaxIfNull(clazz, "class");
+ if (!IRun.class.isAssignableFrom(clazz)
+ && !IRunIterator.class.isAssignableFrom(clazz)) {
+ String s = clazz.getName() + " is not assignable to IRun or IRunIterator";
+ LangUtil.throwIaxIfFalse(false, s);
+ }
+ LangUtil.throwIaxIfNull(listener, "listener");
+ LangUtil.throwIaxIfNull(name, "name");
+ LangUtil.throwIaxIfFalse(0 < name.length(), "empty name");
+ this.clazz = clazz;
+ this.listener = listener;
+ this.name = name;
+ }
+
+ /** @return feature name */
+ public String toString() {
+ return name;
+ }
+ }
+}
+
+
+/**
+ * Harness with features for controlling output
+ * (logging results and hiding streams).
+ * Use -help to get a list of feature options.
+ */
+class FeatureHarness extends Harness {
+
+ private static final String[] ALIASES = new String[]
+ { "-hideStreams",
+ "-hideCompilerStreams"
+ + OPTION_DELIM + "-hideRunStreams",
+ "-jim",
+ "-logMinFail"
+ + OPTION_DELIM + "-hideStreams",
+ "-loud",
+ "-verboseHarness",
+ "-baseline",
+ "-verboseHarness"
+ + OPTION_DELIM + "-traceTestsMin"
+ + OPTION_DELIM + "-hideStreams",
+ "-release",
+ "-baseline"
+ + OPTION_DELIM + "-ajctestSkipKeywords=knownLimitation,purejava",
+ "-junit",
+ "-silentHarness" + OPTION_DELIM + "-logJUnit" + OPTION_DELIM +
+ "-hideStreams",
+ "-cruisecontrol",
+ "-junit" + OPTION_DELIM + "-ajctestSkipKeywords=knownLimitation,purejava"
+ };
+ static {
+ Properties optionAliases = Harness.getOptionAliases();
+ if (null != optionAliases) {
+ for (int i = 1; i < ALIASES.length; i += 2) {
+ optionAliases.put(ALIASES[i-1], ALIASES[i]);
+ }
+ }
+ }
+
+ /** controller for suppressing and sniffing error and output streams. */
+ StreamsHandler streamsHandler;
+
+ /** facility of hiding-streams may be applied in many features */
+ IRunListener streamHider;
+
+ /** facility of capture/log may be applied in many features */
+ IRunListener captureLogger;
+
+ /** when making tests, do not run them */
+ TestMaker testMaker;
+
+ public FeatureHarness() {
+ super();
+ streamsHandler = new StreamsHandler(false, true);
+ }
+ /** override to make tests or run as usual */
+ protected RunResult run(AjcTest.Suite.Spec spec) {
+ if (null != testMaker) {
+ System.out.println("generating rather than running tests...");
+ return testMaker.run(spec);
+ } else {
+ return super.run(spec);
+ }
+ }
+
+ /**
+ * Log via StreamsHandler-designated log stream.
+ * @see org.aspectj.testing.drivers.Harness#logln(String)
+ */
+ protected void logln(String s) {
+ if (!silentHarness)
+ streamsHandler.lnlog(s);
+ }
+
+ /**
+ * @see org.aspectj.testing.drivers.Harness#getLogStream()
+ * @return StreamsHandler-designated log stream.
+ */
+ protected PrintStream getLogStream() {
+ return streamsHandler.out;
+ }
+
+
+ /** print detail message for syntax of main(String[]) command-line */
+ protected void printSyntax(PrintStream out) {
+ super.printSyntax(out);
+ out.println(" -progressDots log . or ! for each AjcTest pass or fail");
+ out.println(" -logFail log each failed AjcTest");
+ out.println(" -logPass log each passed AjcTest");
+ out.println(" -logAll log each AjcTest");
+ out.println(" -logMinFail log each AjcTest failure with minimal excess data");
+ out.println(" -logMinPass log each AjcTest success with minimal excess data");
+ out.println(" -logMinAll log all AjcTest with minimal excess data");
+ out.println(" -logXMLFail log XML definition for each failed AjcTest");
+ out.println(" -logXMLPass log XML definition for each passed AjcTest");
+ out.println(" -logXMLAll log XML definition for each AjcTest");
+ out.println(" -logJUnit log all tests in JUnit XML report style");
+ out.println(" -hideRunStreams hide err/out streams during java runs");
+ out.println(" -hideCompilerStreams hide err/out streams during compiler runs");
+ out.println(" -traceTests log pass|fail, /time/memory taken after each test");
+ out.println(" -traceTestsMin log pass|fail after each test");
+ out.println(" -XmakeTests create source files/dirs for initial compile run of each test");
+ out.println(" -XlogPublicType log test XML if \"public type\" in an error message");
+ out.println(" -XlogSourceIn=Y,Z log test XML if Y or Z is in path of any sources");
+ super.printAliases(out);
+ }
+
+ /** Accept a number of logging and output options */
+ protected boolean acceptOption(String option) {
+ if (null == option) {
+ return false;
+ }
+
+ final StreamsHandler streams = streamsHandler;
+ final IRunValidator validator = RunValidator.NORMAL;
+ final RunUtils.IRunStatusPrinter verbose
+ = RunUtils.VERBOSE_PRINTER;
+ final RunUtils.IRunStatusPrinter terse
+ = RunUtils.TERSE_PRINTER;
+// final boolean LOGPASS = true;
+// final boolean LOGFAIL = true;
+// final boolean SKIPPASS = false;
+// final boolean SKIPFAIL = false;
+// final boolean LOGSTREAMS = true;
+ final boolean SKIPSTREAMS = false;
+
+ Feature feature = null;
+ if (super.acceptOption(option)) {
+ // ok, result returned below
+
+ } else if ("-XmakeTests".equals(option)) {
+ testMaker = TestMaker.ME;
+ } else if (option.startsWith("-traceTestsMin")) {
+ feature = new Feature(option, AjcTest.class,new TestTraceLogger(streams, false));
+ } else if (option.startsWith("-traceTests")) {
+ feature = new Feature(option, AjcTest.class,new TestTraceLogger(streams, true));
+ } else if (option.startsWith("-logMin")) {
+ feature = new Feature(option, AjcTest.class,
+ new RunLogger(option, SKIPSTREAMS, streams, validator, terse));
+ } else if (option.startsWith("-logXML")) {
+ feature = new Feature(option, AjcTest.class,
+ new XmlLogger(option, streams, validator));
+ } else if (option.startsWith("-logJUnit")) {
+ feature = new Feature(option, AjcTest.class,
+ new JUnitXMLLogger(option,streams,validator));
+ } else if (option.startsWith("-log")) {
+ feature = new Feature(option, AjcTest.class,
+ new RunLogger(option, SKIPSTREAMS, streams, validator, verbose));
+ } else if ("-hideRunStreams".equals(option)) {
+ feature = new Feature(option, JavaRun.class, getStreamHider());
+ } else if ("-hideCompilerStreams".equals(option)) {
+ addFeature(new Feature(option, IncCompilerRun.class, getStreamHider())); // hmmm
+ feature = new Feature(option, CompilerRun.class, getStreamHider());
+ } else if ("-progressDots".equals(option)) {
+ IRunListener listener = new RunListener() {
+ public void runCompleted(IRunStatus run) {
+ streamsHandler.log((validator.runPassed(run) ? "." : "!"));
+ }
+ };
+ feature = new Feature(option, AjcTest.class, listener);
+ } else if (option.startsWith("-XlogPublicType")) {
+ String label = option + TestCompleteListener.PASS; // print when validator true
+ feature = new Feature(option, AjcTest.class,
+ new XmlLogger(label, streams, MessageRunValidator.PUBLIC_TYPE_ERROR));
+ } else if (option.startsWith("-XlogSourceIn")) {
+ String input = option.substring("-XlogSourceIn=".length());
+ LangUtil.throwIaxIfFalse(0 < input.length(), option);
+ String label = "-XlogSourceIn=" + TestCompleteListener.PASS; // print when validator true
+ StringRunner sr = new SubstringRunner(input, false);
+ feature = new Feature(option, AjcTest.class,
+ new XmlLogger(label, streams, new SourcePathValidator(sr)));
+ } else {
+ return false;
+ }
+ addFeature(feature);
+ return true;
+ }
+
+ /** lazy construction for shared hider */
+ protected IRunListener getStreamHider() {
+ if (null == streamHider) {
+ streamHider = new RunListener() {
+ public void runStarting(IRunStatus run) {
+ streamsHandler.hide();
+ }
+ public void runCompleted(IRunStatus run) {
+ streamsHandler.show();
+ }
+ public String toString() { return "Harness StreamHider"; }
+ };
+ }
+ return streamHider;
+ }
+}
+
+/** Generate any needed test case files for any test. */
+class TestMaker {
+
+ static TestMaker ME = new TestMaker();
+
+ /** @throws Error if unable to make dir */
+ static void mkdirs(File dir) {
+ if (null != dir && !dir.exists()) {
+ if (!dir.mkdirs()) {
+ throw new Error("unable to make dir: " + dir);
+ }
+ }
+ }
+ static String getFileContents(File baseDir, File file, String label) {
+ String fileName = file.getName();
+ if (fileName.endsWith(".java")) {
+ fileName = fileName.substring(0, fileName.length() - 5);
+ }
+ StringBuffer sb = new StringBuffer();
+ String filePath = file.getParentFile().getAbsolutePath();
+ String dirPath = baseDir.getAbsolutePath();
+ String pack = null;
+ if (filePath.startsWith(dirPath)) {
+ pack = filePath.substring(dirPath.length()).replace('/', '.');
+ }
+ if (!LangUtil.isEmpty(pack)) {
+ sb.append("package " + pack + ";");
+ }
+ final String EOL = "\n"; // XXX find discovered EOL
+ sb.append( EOL
+ + EOL + "import org.aspectj.testing.Tester;"
+ + EOL + ""
+ + EOL + "/** @testcase " + label + " */"
+ + EOL + "public class " + fileName + " {"
+ + EOL + "\tpublic static void main(String[] args) { "
+ + EOL + "\t\tTester.check(null != args, \"null args\"); "
+ + EOL + "\t}"
+ + EOL + "}"
+ + EOL
+ );
+
+ return sb.toString();
+ }
+
+ /** create a minimal source file for a test */
+ static void createSrcFile(File baseDir, File file, String testName) {
+ if (file.exists()) {
+ return;
+ }
+ String contents = getFileContents(baseDir, file, testName);
+ String error = FileUtil.writeAsString(file, contents);
+ if (null != error) {
+ throw new Error(error);
+ }
+ }
+
+ /** create an empty arg file for a test */
+ static void createArgFile(File baseDir, File file, String testName) {
+ if (file.exists()) {
+ return;
+ }
+ String contents = "// argfile " + file;
+ String error = FileUtil.writeAsString(file, contents);
+ if (null != error) {
+ throw new Error(error);
+ }
+ }
+
+ public Harness.RunResult run(AjcTest.Suite.Spec spec) {
+ ArrayList kids = spec.getChildren();
+ for (Iterator iter = kids.iterator(); iter.hasNext();) {
+ makeTest( (AjcTest.Spec) iter.next());
+ }
+ IRunStatus status = new RunStatus(new MessageHandler(), new Runner());
+ status.start();
+ status.finish(IRunStatus.PASS);
+ return new Harness.RunResult(status, 0);
+ }
+
+ private void makeTest(AjcTest.Spec spec) {
+ if (null == spec) {
+ throw new Error("null spec");
+ }
+ CompilerRun.Spec compileSpec = AjcTest.unwrapCompilerRunSpec(spec);
+ System.out.println(" generating test files for test: " + spec.getDescription());
+ File dir = spec.getSuiteDir();
+ if (null != dir) {
+ TestMaker.mkdirs(dir);
+ }
+ String offset = spec.getTestDirOffset();
+ if (!LangUtil.isEmpty(offset)) {
+ if (null == dir) {
+ dir = new File(offset);
+ } else {
+ dir = new File(dir.getAbsolutePath() + "/" + offset);
+ }
+ } else if (null == dir) {
+ dir = new File(".");
+ }
+ StringBuffer testName = new StringBuffer();
+ int pr = spec.getBugId();
+ if (0 < pr) {
+ testName.append("PR#" + pr + " ");
+ }
+
+ testName.append(spec.getDescription());
+ final String label = testName.toString();
+ final File[] srcFiles = FileUtil.getBaseDirFiles(dir, compileSpec.getPathsArray());
+ if (!LangUtil.isEmpty(srcFiles)) {
+ for (int i = 0; i < srcFiles.length; i++) {
+ TestMaker.createSrcFile(dir, srcFiles[i], label);
+ }
+ }
+ final File[] argFiles = FileUtil.getBaseDirFiles(dir, compileSpec.getArgfilesArray());
+ if (!LangUtil.isEmpty(argFiles)) {
+ for (int i = 0; i < argFiles.length; i++) {
+ TestMaker.createArgFile(dir, argFiles[i], label);
+ }
+ }
+
+ }
+
+ /** @return "Testmaker()" */
+ public String toString() {
+ return "TestMaker()";
+ }
+}
+
+interface StringRunner {
+ boolean accept(String s);
+}
+
+/**
+ * StringRunner that accepts input matching 0+ substrings,
+ * optionally case-insensitive.
+ */
+class SubstringRunner implements StringRunner {
+ private static String[] extractSubstrings(
+ String substrings,
+ boolean caseSensitive) {
+ if (null == substrings) {
+ return null;
+ }
+ StringTokenizer st = new StringTokenizer(substrings, ",");
+ String[] result = new String[st.countTokens()];
+ for (int i = 0; i < result.length; i++) {
+ result[i] = st.nextToken().trim();
+ LangUtil.throwIaxIfFalse(0 < result[i].length(), "empty entry");
+ if (!caseSensitive) {
+ result[i] = result[i].toLowerCase();
+ }
+ }
+ return result;
+ }
+
+ private final String[] substrings;
+ private final boolean caseSensitive;
+
+ /**
+ * @param substrings the String containing comma-separated substrings
+ * to find in input - if null, any input accepted
+ * @param caseSensitive if true, do case-sensitive comparison
+ * @throws IllegalArgumentException if substrings contains an empty entry, e.g., ", ,"
+ */
+ SubstringRunner(String substrings, boolean caseSensitive) {
+ this.caseSensitive = caseSensitive;
+ this.substrings = extractSubstrings(substrings, caseSensitive);
+ }
+
+ public boolean accept(String input) {
+ if (null == substrings) {
+ return true;
+ }
+ if (null == input) {
+ return false;
+ }
+
+ if (!caseSensitive) {
+ input = input.toLowerCase();
+ }
+ for (int i = 0; i < substrings.length; i++) {
+ if (-1 != input.indexOf(substrings[i])) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
+
+/**
+ * Signal whether run "passed" based on validating absolute source paths.
+ * (Static evaluation - no run necessary)
+ */
+class SourcePathValidator implements IRunValidator { // static - no run needed
+ private final StringRunner validator;
+ // XXX hoist common
+ SourcePathValidator(StringRunner validator) {
+ LangUtil.throwIaxIfNull(validator, "validator");
+ this.validator = validator;
+ }
+ /**
+ * @return true if any source files in compile spec are
+ * accepted by the validator.
+ * @see org.aspectj.testing.run.IRunValidator#runPassed(IRunStatus)
+ */
+ public boolean runPassed(IRunStatus run) {
+ AjcTest.Spec testSpec = AjcTest.unwrapSpec(run);
+ if (null != testSpec) {
+ CompilerRun.Spec compileSpec = AjcTest.unwrapCompilerRunSpec(testSpec);
+ File basedir = new File(testSpec.getSuiteDir(), testSpec.getTestDirOffset());
+ String[] paths = compileSpec.getPathsArray();
+ File[] files = FileUtil.getBaseDirFiles(basedir, paths);
+ for (int i = 0; i < files.length; i++) {
+ if (validator.accept(files[i].getAbsolutePath())) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+}
+
+/** Signal whether run "passed" based on message kind and content */
+class MessageRunValidator implements IRunValidator {
+
+ /** signals "passed" if any error contains "public type" */
+ static final IRunValidator PUBLIC_TYPE_ERROR
+ = new MessageRunValidator("public type", IMessage.ERROR, false);
+
+ private final IMessage.Kind kind;
+ private final String sought;
+ private final boolean orGreater;
+
+ /**
+ * @param sought the String to seek anywhere in any message of the right kind;
+ * if null, accept any message of the right kind.
+ * @param kind the IMessage.Kind of messages to search - all if null
+ */
+ MessageRunValidator(String sought, IMessage.Kind kind, boolean orGreater) {
+ this.sought = sought;
+ this.kind = kind;
+ this.orGreater = orGreater;
+ }
+
+ /** @return true if this run has messages of the right kind and text */
+ public boolean runPassed(IRunStatus run) {
+ return gotMessage(new IRunStatus[] {run});
+ }
+
+ /**
+ * Search these children and their children recursively
+ * for messages of the right kind and content.
+ * @return true at first match of message of the right kind and content
+ */
+ private boolean gotMessage(IRunStatus[] children) {
+ if (LangUtil.isEmpty(children)) {
+ return false;
+ }
+ for (int i = 0; i < children.length; i++) {
+ IRunStatus run = children[i];
+ if (null == run) {
+ continue; // hmm
+ }
+ IMessage[] messages = run.getMessages(kind, orGreater);
+ if (!LangUtil.isEmpty(messages)) {
+ if (LangUtil.isEmpty(sought)) {
+ return true;
+ } else {
+ for (int j = 0; j < messages.length; j++) {
+ if (null == messages[j]) {
+ continue; // hmm
+ }
+ String text = messages[j].getMessage();
+ if ((null != text) && (-1 != text.indexOf(sought))) {
+ return true;
+ }
+ }
+ }
+ }
+ if (gotMessage(run.getChildren())) {
+ return true;
+ }
+ }
+ return false;
+ }
+}
+
+/**
+ * Base class for listeners that run depending on pass/fail status of input.
+ * Template method runCompleted handles whether to run.
+ * Subclasses implement doRunCompleted(..).
+ */
+abstract class TestCompleteListener extends RunListener {
+ /** label suffix indicating both pass and fail */
+ public static final String ALL = "All";
+
+ /** label suffix indicating fail */
+ public static final String FAIL = "Fail";
+
+ /** label suffix indicating pass */
+ public static final String PASS = "Pass";
+
+
+ /** runValidator determines if a given run passed */
+ protected final IRunValidator runValidator;
+
+ /** label for this listener */
+ final String label;
+
+ /** if true and run passed, then run doRunCompleted(..) */
+ final boolean logOnPass;
+
+ /** if true and run did not pass, then run doRunCompleted(..) */
+ final boolean logOnNotPass;
+
+ /** may be null */
+ protected final StreamsHandler streamsHandler;
+
+ /** true if the last run evaluation was ok */
+ boolean lastRunOk;
+
+ /** last run evaluated */
+ IRunStatus lastRun; // XXX small memory leak - cache hashcode instead?
+
+ /** @param label endsWith PASS || FAIL || ALL */
+ protected TestCompleteListener(
+ String label,
+ IRunValidator runValidator,
+ StreamsHandler streamsHandler) {
+ if (null == runValidator) {
+ runValidator = RunValidator.NORMAL;
+ }
+ this.label = (null == label? "" : label);
+ this.logOnPass = this.label.endsWith(PASS) || this.label.endsWith(ALL);
+ this.logOnNotPass = this.label.endsWith(FAIL) || this.label.endsWith(ALL);
+ this.runValidator = runValidator;
+ this.streamsHandler = streamsHandler;
+ }
+
+ public void runStarted(IRunStatus run) {
+ if (null != streamsHandler) {
+ streamsHandler.startListening();
+ }
+ }
+
+ /** subclasses implement this to do some per-test initialization */
+ protected void doRunStarted(IRunStatus run) {
+ }
+
+
+ /** subclasses implement this to do some per-suite initialization */
+ protected void doStartSuite(File suite) {
+ }
+
+ /** subclasses implement this to do end-of-suite processing */
+ protected void doEndSuite(File suite, long duration) {
+ }
+
+ public final void runCompleted(IRunStatus run) {
+ boolean doit = lastRunOk(run);
+ StreamsHandler.Result result = null;
+ if (null != streamsHandler) {
+ streamsHandler.endListening(doit);
+ }
+ if (doit) {
+ doRunCompleted(run, result);
+ }
+ }
+
+ /**
+ * @return true if run is ok per constructor specifications
+ */
+ protected boolean lastRunOk(IRunStatus run) {
+ if (lastRun != run) {
+ boolean passed = runValidator.runPassed(run);
+ lastRunOk = (passed ? logOnPass : logOnNotPass);
+ }
+ return lastRunOk;
+ }
+
+ /** @return "{classname}({pass}{,fail})" indicating when this runs */
+ public String toString() { // XXX add label?
+ return LangUtil.unqualifiedClassName(this)
+ + "(" + (logOnPass ? (logOnNotPass ? "pass, fail)" : "pass)")
+ : (logOnNotPass ? "fail)" : ")"));
+ }
+ /**
+ * Subclasses implement this to do some completion action
+ * @param run the IRunStatus for this completed run
+ * @param result the StreamsHandler.Result (if any - may be null)
+ */
+ public abstract void doRunCompleted(IRunStatus run, StreamsHandler.Result result);
+}
+
+/**
+ * Write XML for any test passed and/or failed.
+ * Must register with Runner for RunSpecIterator.class,
+ * most sensibly AjcTest.class.
+ */
+class XmlLogger extends TestCompleteListener {
+ /**
+ * @param printer the component that prints any status - not null
+ * @param runValidator if null, use RunValidator.NORMAL
+ */
+ public XmlLogger(
+ String label,
+ StreamsHandler streamsHandler,
+ IRunValidator runValidator) {
+ super(label, runValidator, streamsHandler);
+ }
+
+ public void doRunCompleted(IRunStatus run, StreamsHandler.Result result) {
+ PrintStream out = streamsHandler.getLogStream();
+ out.println("");
+ XMLWriter writer = new XMLWriter(new PrintWriter(out, true));
+ Object id = run.getIdentifier();
+ if (!(id instanceof Runner.IteratorWrapper)) {
+ out.println(this + " not IteratorWrapper: "
+ + id.getClass().getName() + ": " + id);
+ return;
+ }
+ IRunIterator iter = ((Runner.IteratorWrapper) id).iterator;
+ if (!(iter instanceof RunSpecIterator)) {
+ out.println(this + " not RunSpecIterator: " + iter.getClass().getName()
+ + ": " + iter);
+ return;
+ }
+ ((RunSpecIterator) iter).spec.writeXml(writer);
+ out.flush();
+ }
+
+}
+
+/**
+ * Write junit style XML output (for incorporation into html test results and
+ * cruise control reports).
+ * format is...
+ * <?xml version="1.0" encoding="UTF-8" ?>
+ * <testsuite errors="x" failures="x" name="suite-name" tests="xx" time="ss.ssss">
+ * <properties/>
+ * <testcase name="passingTest" time="s.hh"></testcase>
+ * <testcase name="failingTest" time="s.hh">
+ * <failure message="failureMessage" type="ExceptionType">free text</failure>
+ * </testcase>
+ * </testsuite>
+ */
+class JUnitXMLLogger extends TestCompleteListener {
+
+// private File suite;
+ private StringBuffer junitOutput;
+ private long startTimeMillis;
+ private int numTests = 0;
+ private int numFails = 0;
+ private DecimalFormat timeFormatter = new DecimalFormat("#.##");
+
+ public JUnitXMLLogger(
+ String label,
+ StreamsHandler streamsHandler,
+ IRunValidator runValidator) {
+ super(label + ALL, runValidator, streamsHandler);
+ junitOutput = new StringBuffer();
+ }
+
+ /* (non-Javadoc)
+ * @see org.aspectj.testing.drivers.TestCompleteListener#doRunCompleted(org.aspectj.testing.run.IRunStatus, org.aspectj.testing.util.StreamsHandler.Result)
+ */
+ public void doRunCompleted(IRunStatus run, Result result) {
+ long duration = System.currentTimeMillis() - startTimeMillis;
+ numTests++;
+ junitOutput.append("<testcase name=\"" + run.getIdentifier() + "\" ");
+ junitOutput.append("time=\"" + timeFormatter.format((duration)/1000.0f) + "\"");
+ junitOutput.append(">");
+ if (!run.runResult()) {
+ numFails++;
+ junitOutput.append("\n");
+ junitOutput.append("<failure message=\"test failed\" type=\"unknown\">\n");
+// junitOutput.println(result.junitOutput);
+// junitOutput.println(result.err);
+ junitOutput.append("</failure>\n");
+ }
+ junitOutput.append("</testcase>\n");
+ }
+
+ /* (non-Javadoc)
+ * @see org.aspectj.testing.drivers.TestCompleteListener#runStarted(org.aspectj.testing.run.IRunStatus)
+ */
+ public void runStarting(IRunStatus run) {
+ super.runStarting(run);
+ startTimeMillis = System.currentTimeMillis();
+ }
+
+ /* (non-Javadoc)
+ * @see org.aspectj.testing.drivers.TestCompleteListener#doEndSuite(java.io.File, long)
+ */
+ protected void doEndSuite(File suite, long duration) {
+ super.doEndSuite(suite, duration);
+ String suiteName = suite.getName();
+ // junit reporter doesn't like ".xml" on the end
+ suiteName = suiteName.substring(0,suiteName.indexOf('.'));
+ PrintStream out = streamsHandler.getLogStream();
+ out.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
+ String timeStr = new DecimalFormat("#.##").format(duration/1000.0);
+ out.print("<testsuite errors=\"" + numFails + "\" failures=\"0\" ");
+ out.print("name=\"" + suite.getName() + "\" " );
+ out.println("tests=\"" + numTests + "\" time=\"" + timeStr + "\">");
+ out.print(junitOutput.toString());
+ out.println("</testsuite>");
+ }
+
+ /* (non-Javadoc)
+ * @see org.aspectj.testing.drivers.TestCompleteListener#doStartSuite(java.io.File)
+ */
+ protected void doStartSuite(File suite) {
+ super.doStartSuite(suite);
+// this.suite = suite;
+ numTests = 0;
+ numFails = 0;
+ junitOutput = new StringBuffer();
+ }
+
+}
+
+/** log pass and/or failed runs */
+class RunLogger extends TestCompleteListener {
+ final boolean logStreams;
+ final RunUtils.IRunStatusPrinter printer;
+
+ /**
+ * @param printer the component that prints any status - not null
+ * @param runValidator if null, use RunValidator.NORMAL
+ */
+ public RunLogger(
+ String label,
+ boolean logStreams,
+ StreamsHandler streamsHandler,
+ IRunValidator runValidator,
+ RunUtils.IRunStatusPrinter printer) {
+ super(label, runValidator, streamsHandler);
+ LangUtil.throwIaxIfNull(streamsHandler, "streamsHandler");
+ LangUtil.throwIaxIfNull(printer, "printer");
+ this.logStreams = logStreams;
+ this.printer = printer;
+ }
+
+ public void doRunCompleted(IRunStatus run, StreamsHandler.Result result) {
+ PrintStream out = streamsHandler.getLogStream();
+ printer.printRunStatus(out, run);
+ if (logStreams) {
+ if (!LangUtil.isEmpty(result.err)) {
+ out.println("--- error");
+ out.println(result.err);
+ }
+ if (!LangUtil.isEmpty(result.out)) {
+ out.println("--- ouput");
+ out.println(result.out);
+ }
+ }
+ out.println("");
+ }
+}
+
+/** trace time and memory between runStarting and runCompleted */
+class TestTraceLogger extends TestCompleteListener {
+ private static final Runtime runtime = Runtime.getRuntime();
+ private long startTime;
+ private long startMemoryFree;
+ private final boolean verbose;
+
+ public TestTraceLogger(StreamsHandler handler) {
+ this(handler, true);
+ }
+ public TestTraceLogger(StreamsHandler handler, boolean verbose) {
+ super("-traceTestsAll", null, handler);
+ this.verbose = verbose;
+ }
+ public void runStarting(IRunStatus run) {
+ super.runStarting(run);
+ startTime = System.currentTimeMillis();
+ startMemoryFree = runtime.freeMemory();
+ }
+
+ public void doRunCompleted(IRunStatus run, StreamsHandler.Result result) {
+ long elapsed = System.currentTimeMillis() - startTime;
+ long free = runtime.freeMemory();
+ long used = startMemoryFree - free;
+ String label = run.runResult() ? "PASS " : "FAIL ";
+ PrintStream out = streamsHandler.getLogStream();
+ if (verbose) {
+ label = label
+ + "elapsed: " + LangUtil.toSizedString(elapsed, 7)
+ + " free: " + LangUtil.toSizedString(free, 10)
+ + " used: " + LangUtil.toSizedString(used, 10)
+ + " id: ";
+ }
+ out.println(label + renderId(run));
+ }
+
+ /** @return true - always trace tests */
+ protected boolean isFailLabel(String label) {
+ return true;
+ }
+
+ /** @return true - always trace tests */
+ protected boolean isPassLabel(String label) {
+ return true;
+ }
+
+ /**
+ * This implementation returns run identifier toString().
+ * Subclasses override this to render id as message suffix.
+ */
+ protected String renderId(IRunStatus run) {
+ return "" + run.getIdentifier();
+ }
+}
+ // printing files
+// AjcTest.Spec testSpec = AjcTest.unwrapSpec(run);
+// if (null != testSpec) {
+// CompilerRun.Spec compileSpec = AjcTest.unwrapCompilerRunSpec(testSpec);
+// File dir = new File(testSpec.getSuiteDir(), testSpec.getTestDirOffset());
+// List files = compileSpec.getPathsAsFile(dir);
+// StringBuffer sb = new StringBuffer();
+// for (Iterator iter = files.iterator(); iter.hasNext();) {
+// File file = (File) iter.next();
+// sb.append(" " + file.getPath().replace('\\','/').substring(2));
+// }
+// out.println("files: " + sb);
+// }
+
+
--- /dev/null
+<html>
+ <head><title>Harness Package Documentation</title></head>
+<body>
+<p>
+The AspectJ compiler test harness can compile and run AspectJ programs
+as specified by the test definitions.
+This document tells you how to run the harness.
+It describes the options you can specify on the command-line to
+control various components that form the harness, either to
+specify options that augment the test definitions or to
+change how the harness works, e.g., selecting particular tests
+or logging more information.
+For information on how to write a test definition, see
+<code>readme-writing-compiler-tests.html</code> in the testing module.
+</p>
+<p>
+The harness drives compiler tests, using
+a chain of responsibility to map elements
+in the schema of a test definition to implementing classes.
+
+</p>
+<table border="1" cellpadding="1">
+<tr><th align="left">Test feature</th>
+ <th align="left">Description</th>
+ <th align="left">Implementing class</th>
+</tr>
+<tr><td>(loading suites...)</td>
+ <td>general harness</td>
+ <td>Harness</td>
+</tr>
+<tr><td>(logging...)</td>
+ <td>subclass feature harness</td>
+ <td>FeatureHarness</td>
+</tr>
+<tr><td><code>&lt;suite&gt;</code></td>
+ <td>Test suite</td>
+ <td>AjcTest.Suite</td>
+</tr>
+<tr><td> <code>&lt;ajc-test&gt;</code></td>
+ <td>Test case</td>
+ <td>AjcTest</td>
+</tr>
+<tr><td> <code>&lt;compile&gt;</code></td>
+ <td>Initial (batch) compile run</td>
+ <td>CompilerRun</td>
+</tr>
+<tr><td> <code>&lt;inc-compile&gt;</code></td>
+ <td>Incremental re-compile</td>
+ <td>IncCompilerRun</td>
+</tr>
+<tr><td> <code>&lt;run&gt;</code></td>
+ <td>Run class</td>
+ <td>JavaRun</td>
+</tr>
+</table>
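+<p>
+For orientation, a minimal test definition might look roughly like the sketch
+below. The element names come from the table above; the attribute names shown
+(<code>dir</code>, <code>title</code>, <code>files</code>, <code>class</code>)
+are illustrative only - see <code>readme-writing-compiler-tests.html</code>
+for the authoritative schema.
+</p>
+<pre>
+  &lt;suite&gt;
+    &lt;ajc-test dir="hello" title="compile and run hello world"&gt;
+      &lt;compile files="Hello.java"/&gt;
+      &lt;run class="Hello"/&gt;
+    &lt;/ajc-test&gt;
+  &lt;/suite&gt;
+</pre>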
+<!--
+ general harness (Harness)
+ subclass feature harness (FeatureHarness)
+ <ajc-test> run component (AjcTest)
+ <compile> {sub-} run component (CompilerRun)
+ <inc-compile> {sub-} run component (IncCompilerRun)
+ <run> {sub-} run component (JavaRun)
+ ...
+-->
+<p/>
+The compiler used is the AspectJ compiler <code>ajc</code>
+(optionally as wrapped by the Ant task or AJDE API's), but
+in principle any compiler accepting similar options can be
+used.
+<p/>
+To run from the command-line, use
+<code>Harness.main(String[])</code>.
+To run programmatically, use <code>Harness.getHarness()</code>.
+<code>Harness.runMain(String[])</code> takes arguments that
+each component in the chain may accept and interpret, so
+you can modify how the tests run by specifying the following
+arguments on the harness command line:
+<p/>
+<table cellpadding="1" border="1">
+<tr><th>Component</th><th>Options</th></tr>
+
+<tr><td rowspan="6" valign="top">Harness
+ <p>suite files, harness verbosity, temp files, option variants
+ </p></td></tr>
+ <tr><td><u>suite files</u>: ajcTest-compliant .txt or .xml files are accepted.
+ <!-- XXX link to ajcTestSuite.dtd and .txt definitions -->
+ </td></tr>
+ <tr><td><u><code>-verboseHarness</code></u>,
+ <u><code>-quietHarness</code></u>:
+ <code>-verboseHarness</code> logs accepted options and skipped tests;
+ <code>-quietHarness</code> suppresses even info messages.
+ </td></tr>
+ <tr><td><u><code>-keepTemp</code></u>: Normally the harness saves temp files until
+ the end of the run, and deletes them. If you abort the run or specify
+ <code>-keepTemp</code>, then temporary (sandbox) directories will remain for analysis.
+ In either case, the file system accumulates all temporary directories
+ and files used for a given harness run; for the <code>ajcTests.xml</code>
+ suite, this runs into thousands of files.
+ </td></tr>
+ <tr><td><u><code>-killTemp</code></u>: The opposite of <code>-keepTemp</code>,
+ this causes the harness to delete temporary (sandbox) directories at
+ the end of each test run.
+ In this case, the file system only accumulates files for
+ the current test.
+ </td></tr>
+ <tr><td><u>*- variants</u>: Options with a trailing "-" cause two sets of
+ option lists to be produced, one with and one without the corresponding
+ option. E.g., "-emacssym-" will run the suite twice, once with and
+ once without the "-emacssym" flag.
+ That means if you use this on each of three options, you will
+ get 8 variant sets (1 with no options, 1 with all 3 options,
+ 3 with 2 options, and 3 with 1 option).
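+ For instance, a hypothetical invocation using three options named
+ elsewhere in this document,
+ <pre>
+ java {harness} -usejavac- -Xlint- -emacssym- {suiteFile}
+ </pre>
+ would run the suite eight times, once for each combination of
+ <code>-usejavac</code>, <code>-Xlint</code>, and <code>-emacssym</code>.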
+ </td></tr>
+
+<tr><td rowspan="5" valign="top">FeatureHarness
+ <p>output and logging options
+ </p></td></tr>
+ <tr><td><u>tracing</u>:
+ <code>-progressDots</code> will print "." for every passed
+ test completed and "!" for every test completed but not passed.
+ <code>-traceTests</code> will print a one-line summary for each test
+ of the time and space taken and whether the test passed.
+ <code>-traceTestsMin</code> will print only the test and whether it passed.
+ <code>-baseline</code> is an alias for
+ <code>-traceTestsMin</code>,
+ <code>-hideStreams</code>, and
+ <code>!eclipse</code>; it emits test results in a form
+ that can be compared by <code>org.aspectj.testing.util.TestDiffs</code>
+ or used to select tests by title for options like
+ <code>-ajctestTitleList</code>.
+ </td></tr>
+
+ <tr><td><u>output</u>: <code>-hide{Compiler|Run}Streams</code> will prevent output and
+ error streams from being printed to System.err and System.out,
+ optionally only for run or compile steps.
+ </td></tr>
+ <tr><td><u>logging</u>:
+ Log variants take the form <code>-log{Min|Xml}[Fail|Pass|All]</code>.
+ The suffix {All|Pass|Fail} selects all tests or only passing or failing tests.
+ The infix {Min} means to log with minimal information, typically only any
+ fail messages.
+ The infix {Xml} means to log the XML form of the test definition, so that
+ you can inspect the input or re-run arbitrary tests.
+ (You can also re-run a set of tests using keywords
+ (e.g., <code>-ajctestRequireKeywords=...</code>) or using titles
+ (e.g., <code>-ajctestTitleFailList=ajcTestResults.txt</code>).)
+ Finally, the experimental option <code>-XlogPublicType</code> will
+ log the XML test definition for
+ any test run that emits any ERROR messages containing the text "public type".
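+ As an example of the <code>-log{Min|Xml}[Fail|Pass|All]</code> scheme above,
+ a run like
+ <pre>
+ java {harness} -hideStreams -logXmlFail {suiteFile}
+ </pre>
+ should log the XML definition of each failing test so the failures
+ can be inspected or re-run later.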
+ </td></tr>
+ <tr><td><u>interaction of output streams and logging</u>:
+ Streams will be emitted in real-time,
+ <em>before</em> the test is logged, unless streams are hidden.
+ When logging in normal (non-Min or -XML) form, the log will emit the streams
+ with the test report, so e.g., you can use -hideStreams -logFail to
+ hide streams for passing tests but emit them for failing tests
+ in the context of the log.
+ </td></tr>
+
+<tr><td rowspan="5" valign="top">AjcTest
+ <p>selection options for keywords, bugID (PR), or title (description)
+ </p></td></tr>
+ <tr><td><u>keywords</u>: <code>-ajctest[Require|Skip]Keywords=one{,two}</code>
+ will either require or skip a test that has one of the
+ specified keywords.
+ </td></tr>
+ <tr><td><u>Bugs</u>: <code>-ajctestPR=101{,102}</code>
+ will require that a test have one of the listed bug id's.
+ </td></tr>
+ <tr><td><u>title</u>:
+ <code>"-ajctestTitleContains=one,two"</code>
+ will require that the title (description) of a test contain
+ one of the specified substrings, here either "one" or "two".
+ Use this to select a few tests you know generally.
+ <br/>
+ <code>"-ajctestTitleList=first title\, in theory, second title"</code>
+ will require that the title (description) of a test be
+ exactly "first title, in theory" or "second title".
+ The entire option must be one argument on the command line.
+ Use this when working with just a few specific tests.
+ <br/>
+ <code>"-ajctestTitleList=../tests/ajcTestResults.txt"</code>
+ will require that the title (description) of a test be
+ equal to one listed in <code>../tests/ajcTestResults.txt</code>
+ as a line of the form "[PASS|FAIL] {title}(.."
+ (This form is emitted by the <code>-traceTestsMin</code> option).
+ This option only differs from the prior in that the parameter
+ is a valid file to read.
+ Use this to re-run a large set of tests.
+ <br/>
+ <code>"-ajctestTitleFailList=../tests/ajcTestResults.txt"</code>
+ is the same as the <code>-ajctestTitleList={file}</code> variant,
+ except that only results prefixed "FAIL" are included.
+ Use this to re-run only the tests that failed from a large set.
+ </td></tr>
+
+ <tr><td><u>Combinations</u>: all selectors are applied to each test,
+ so all tests selected will comply with all constraints.
+ Specifying lists within a particular constraint will match
+ a union of tests for that constraint
+ (e.g., all tests with bug id's 101 or 102),
+ but there is no way to get a union of constraints
+ (e.g., all tests with bug id's 101 or 102 <em>or</em>
+ with keywords pure-java or knownLimitation).
+ However, <code>-ajctestSkipKeywords=...</code> can return all
+ tests without the specified keywords, so it can form unions like
+ "all tests without the knownLimitation keyword, but with
+ bug id's 101 or 102".
+ Title lists can work similarly. E.g., to run the failed
+ incremental tests from ajcTestResults.txt, specify
+ <code>-ajctestTitleFailList=../tests/ajcTestResults.txt</code>
+ <code>-ajctestRequireKeywords=incremental-test</code>.
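+ As one complete command line (hypothetical, but built only from options
+ shown in this document), that combination might read:
+ <pre>
+ java {harness} -logFail \
+     -ajctestTitleFailList=../tests/ajcTestResults.txt \
+     -ajctestRequireKeywords=incremental-test \
+     ajcTests.xml
+ </pre>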
+ </td></tr>
+
+<tr><td rowspan="6" valign="top">CompilerRun
+<p>compiler options and side-effects
+ </p></td></tr>
+ <tr><td><u>supported options</u>:
+ The command line passed to the compiler by <code>CompilerRun</code>
+ is composed of entries derived from the <code>&lt;compile&gt;</code>
+ attributes and recognized from the harness command line.
+ <code>&lt;compile&gt;</code> has specific attributes like
+ <code>files</code>,
+ <code>argfiles</code>,
+ <code>classpath</code> and
+ <code>sourceroot</code>
+ which translate directly to their counterparts.
+ The output option <code>-d</code> is defined by <code>CompilerRun</code> and
+ may not be specified (and <code>-outjar</code> is not supported).
+ Most other compiler options are defined in
+ <code>CompilerRun.Spec.CRSOptions</code> and may be specified
+ on the harness command-line
+ or in the <code>options</code> attribute of
+ <code>&lt;compile&gt;</code>.
+ In the <code>options</code> attribute, each argument is comma-delimited,
+ so an option with an argument would look like
+ <code>&lt;compile options="-source,1.4" ...&gt;</code>.
+ If options collide, duplicates
+ can be resolved using option dominance (below).
+ </td></tr>
+ <tr><td><u>compiler selectors</u>:
+ Use <code>-ajc</code> or <code>-eclipse</code> to select the old
+ (ajc 1.0) or new (eajc 1.1) compilers.
+ Note that the old compiler is not
+ available in the CVS source tree at eclipse.org.
+ Use <code>-ajdeCompiler</code> to run a wrapper around the
+ AJDE interface
+ and <code>-ajctaskCompiler</code> to run a wrapper around the
+ AjcTask (Ant task) interface.
+ </td></tr>
+ <tr><td><u>option dominance <code>[-|!|^]</code></u>:
+ Some tests require or prohibit certain options;
+ likewise, sometimes you want to force all tests
+ to run with or without an option specified on the command-line,
+ regardless of its setting in the <code>&lt;compile options=".." ...&gt;</code>
+ attribute.
+ For this reason an option may be specified in the options attribute
+ or on the harness command-line as
+ <code>-option</code>,
+ <code>!option</code>, or
+ <code>^option</code>.
+ <ul>
+ <li><u>- set</u>: If the leading character of an option is "-", then it is set unless forced-off.</li>
+ <li><u>^ force-off</u>: If the leading character of an option is "^", then it is forced off.
+ Any other matching option will be removed.</li>
+ <li><u>! force-on</u>: If the leading character of an option is "!", then it is forced on.
+ Any other non-force-on matching option will be removed.</li>
+ <li><u>force conflict</u>: If there are two matching force-on options, the test is skipped.</li>
+ <li><u>related options</u>: Two options match if they are the same or
+ if they are in the same family. For example, <code>-ajc</code> and
+ <code>-eclipse</code> both select the compiler, and <code>-source 1.4</code>
+ and <code>-source 1.3</code> both set the source level.
+ <br/>
+ </li>
+ </ul>
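+ As a worked example (hypothetical, using options described above):
+ suppose a test specifies
+ <code>&lt;compile options="-Xlint,-usejavac" ...&gt;</code>
+ and the harness is invoked as
+ <pre>
+ java {harness} ^usejavac !eclipse {suiteFile}
+ </pre>
+ Then <code>-usejavac</code> is forced off and removed,
+ the eclipse-based compiler is forced on,
+ and <code>-Xlint</code> passes through unchanged.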
+ </td></tr>
+ <tr><td><u>auto-skip</u>: After combining global and local options, there may be
+ conflicting or impossible options, which cause the test to be skipped:
+ <ul>
+ <li><u>semantic conflicts</u>: two options may conflict in meaning
+ - e.g., <code>-lenient</code> and <code>-strict</code></li>
+ <li><u>impossible option</u>: It may not be possible in the current configuration to
+ implement an option - e.g., <code>-usejavac</code> or <code>-eclipse</code>
+ when javac or the eclipse implementation is not on the classpath
+ <br/></li>
+ </ul>
+ </td></tr>
+
+ <tr><td><u>source searching</u>: Given <code>-seek:{literal}</code>,
+ as a side-effect,
+ <code>CompilerRun</code> will search source files for {literal},
+ emitting for each instance an INFO message of the form:
+ <tt>found: {file}:{line}:{column}</tt>
+ (Note that the harness does not display INFO messages unless <tt>-verboseHarness</tt>
+ or <tt>-loud</tt> is used.)
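+ For example (the search literal here is arbitrary):
+ <pre>
+ java {harness} -verboseHarness -seek:System.exit {suiteFile}
+ </pre>
+ would report each location in the test sources where
+ <tt>System.exit</tt> appears.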
+ </td></tr>
+
+
+ <tr><td rowspan="2" valign="top">JavaRun
+ <p>Options and forking</p></td>
+ <td><u>options</u>: options specified in the test are passed
+ to the main method as they would be on the command-line.
+ No options passed to the harness are passed through to
+ the main class.
+ </td></tr>
+ <tr><td><u>forking</u>:
+ Forking is useful for running in a version of Java that the
+ harness itself cannot run under (e.g., some 1.1 flavor);
+ otherwise it only adds time.
+ Currently forking is only controllable through system properties
+ of the invoking vm (defined in JavaRun.java):
+ <ul>
+ <li><u>javarun.fork</u>: set to any value to run in a new vm.
+ </li>
+ <li><u>javarun.java</u>: path to the java executable to run
+ (suffix included). If not supplied, the harness tries to
+ find the java that invoked the harness.
+ </li>
+ <li><u>javarun.java.home</u>: the value of the JAVA_HOME
+ environment variable, if it needs to be set.
+ </li>
+ <li><u>javarun.bootclasspath</u>: this is prepended to the
+ run classpath. Multiple entries must be separated by
+ the system-dependent path separator.
+ </li>
+ <li><u>javarun.vmargs</u>: this is added to the fork command-line
+ right after java. Multiple entries must be separated by a comma
+ (and the whole thing should be one parameter), e.g.,
+ <code>-Djavarun.vmargs=-Dname=value,-Dname2="value 2"</code>
+ </li>
+ </ul>
+ </td></tr>
+</table>
+<br/>
+Following are some sample configurations:
+<ul>
+<li><code>java {harness} -hideStreams {suiteFile}</code>
+ <p>Use this to output only a 1-line summary of the test results
+ (tests skipped, incomplete, failed, passed).<br/></p>
+ </li>
+
+<li><code>java {harness} -hideStreams -traceTestsMin {suiteFile} > results.txt</code>
+ <p>This writes to results.txt one line [PASS|FAIL] per test, plus a
+ 1-line summary of the test results.<br/></p>
+ </li>
+
+<li><code>java {harness} -logFail {suiteFile} -ajctestTitleFailList=results.txt</code>
+ <p>This re-runs any test that failed from the "results.txt" run,
+ verbosely logging any fails.<br/></p>
+ </li>
+
+<li><code>java {harness} -hideStreams -logMinFail {suiteFile}</code>
+ <p>Use this when running tests mainly to see if they pass or
+ if the failure messages are typically enough information
+ to indicate why the test is failing. It produces only minimal
+ output for failed tests.<br/></p>
+ </li>
+
+<li><code>java {harness} -hideStreams -verboseHarness -logFail {suiteFile}</code>
+ <p>When it's not clear at first glance why a test is failing, before
+ looking at the test code you can run it and print any harness or test
+ setup failures and all the associated messages from the test components.<br/></p>
+ </li>
+
+<li><code>java {harness} -hideStreams -usejavac- -ajc -Xlint- {suiteFile}</code>
+ <p>Because of the trailing '-' on two of the options,
+ this would do four complete runs with the old (Ajc 1.0) compiler: one with
+ no options, one with -usejavac, one with -Xlint, and one with both.<br/></p>
+ </li>
+
+
+<li><code>java {harness} -ajctestPR=101,102 -Xlint- ^usejavac !eclipse {suiteFile}</code>
+ <p>Run any tests associated with bug 101 or 102, with and without -Xlint,
+ forcing off -usejavac and forcing the use of the new eclipse-based compiler.<br/></p>
+ </li>
+
+</ul>
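+<p>
+The same kinds of runs can also be launched from Java code. A minimal sketch
+follows; the wrapper class is hypothetical, and only the entry points named
+above (<code>Harness.main(String[])</code>, <code>Harness.getHarness()</code>)
+come from this document, so check the current source for the exact signatures.
+</p>
+<pre>
+  import org.aspectj.testing.drivers.Harness;
+
+  public class RunAjcTests {
+      public static void main(String[] args) throws Exception {
+          // same arguments as the command-line samples above
+          Harness.main(new String[] { "-hideStreams", "-logMinFail", "ajcTests.xml" });
+      }
+  }
+</pre>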
+
+If you have a set of options you use often, you can define a single-word
+option alias for it; see <code>Harness.optionAliases</code>.
+
+<br/><u>Configuration</u>: Most tests use the library jars in
+<code>modules/lib/test</code>, defined in
+<code>org.aspectj.testing.harness.bridge.Globals</code>.
+Normally the harness finds these by the relative path
+<code>../lib/test/*.jar</code>, which works whenever the tests are
+run from a peer module directory. When running tests elsewhere,
+define the system property <code>harness.libdir</code> - e.g.,
+<pre>
+ $ cd aspectj/tests
+ $ java -Dharness.libdir=../modules/lib/test ...
+</pre>
+
+<br/><u>Forking</u>:
+The harness must be run in a compiler-compatible VM, and the
+compiler steps run in-process.
+However, the java steps can be run in forked mode, which is useful
+when compiling for a VM which can't run the compiler.
+Compiling for a different target VM could also require
+setting the bootclasspath, target, and source options.
+To run the harness so that any &lt;run&gt; steps run in a
+separate vm, do something like this:
+<pre>
+ java -Djavarun.java=d:\jdk1.1.8\bin\java.exe \
+ -Djavarun.bootclasspath=d:\jdk1.1.8\lib\classes.zip \
+ -Djavarun.java.home=d:\jdk1.1.8 \
+ -Djavarun.fork=true \
+ -jar ../aj-build/jars/testing-drivers-all.jar \
+ ajcTests.xml -logFail
+</pre>
+
+Here <code>CompilerRun</code> would add the specified bootclasspath when compiling,
+and <code>JavaRun</code> would fork the 1.1 vm and prepend the bootclasspath
+to the classpath, with an effect like these commands
+(ignoring the line splitting in the classpath):
+<pre>
+ set JAVA_HOME=d:\jdk1.1.8
+ d:\jdk1.1.8\bin\java.exe \
+ -classpath "d:\jdk1.1.8\lib\classes.zip;
+ d:\aspectj-src\lib\test\testing-client.jar;
+ d:\aspectj-src\lib\test\aspectjrt.jar;
+ c:\TEMP\sandbox7wers\classes"
+ {mainClass} {option..}
+</pre>
+
+
+</body>
+</html>