<?xml version="1.0"?>
<project name="Run Vaadin Testbench Tests" basedir="." default="run-and-clean-up">
<!-- ================================================================== -->
<!-- Configuration -->
<!-- ================================================================== -->
<!-- Ant properties are immutable: the first definition wins, so every value
     below can be overridden by the caller with -Dname=value. -->
<!-- Browsers to use for testing -->
<property name="browsers-windows" value="winxp-ie6,winxp-ie7,winxp-ie8,win7-ie9,winxp-firefox36,winxp-firefox4,winxp-firefox5,winxp-safari4,winxp-safari5,winxp-googlechrome-stable,winxp-opera1060,winxp-opera11" />
<property name="browsers-linux" value="linux-firefox3,linux-opera10,linux-googlechrome8" />
<property name="browsers-mac" value="osx-firefox3,osx-opera10,osx-googlechrome8,osx-safari4,osx-safari5" />
<!-- Browser set actually used; defaults to the Windows list -->
<property name="browsers" value="${browsers-windows}" />
<!-- Number of times to retry a test if it fails -->
<property name="retries" value="2" />
<!-- Screen shot base directory; must be supplied by the caller -->
<fail unless="com.vaadin.testbench.screenshot.directory" message="The 'com.vaadin.testbench.screenshot.directory' property must be defined." />
<!-- Screen shot resolution -->
<property name="com.vaadin.testbench.screenshot.resolution" value="1500x850" />
<!-- Host running Testbench Hub -->
<property name="com.vaadin.testbench.tester.host" value="testbench-hub.intra.itmill.com" />
<!-- Tolerated per-block error when comparing screenshots -->
<property name="com.vaadin.testbench.screenshot.block.error" value="0.025"/>
<property name="com.vaadin.testbench.debug" value="false"/>
<!-- Temporary output directory, created and removed by this script -->
<!-- <property name="test-output-dir" value="../build/test-output" /> -->
<fail unless="test-output-dir" message="The 'test-output-dir' property must be defined." />
<!-- Compiled test classes are placed under the temporary output directory -->
<property name="class-dir" value="${test-output-dir}/classes" />
<!-- Ant-Contrib supplies the <for> task used by the run-tests target -->
<taskdef resource="net/sf/antcontrib/antlib.xml">
<classpath>
<pathelement location="../build/lib/ant-contrib-1.0b3.jar" />
</classpath>
</taskdef>
<!-- classpath must include test bench jar and its dependencies -->
<path id="classpath">
<fileset dir="${com.vaadin.testbench.lib.dir}" includes="**/*.jar" />
</path>
<!-- fileset containing all tests to run -->
<fileset dir=".." id="html-test-files">
<include name="tests/**/**.html" />
<include name="src/com/vaadin/tests/**/**.html" />
<!-- Integration test scripts are not part of this test set -->
<exclude name="tests/integration-testscripts/**" />
</fileset>
<!-- This target converts HTML tests files to java junit tests. One test file for each browser is created. -->
<!-- Runs only when server start succeeded, after clearing old generated tests -->
<target name="create-tests" depends="remove-temp-testclasses" if="server.start.succeeded">
<!-- Flatten the HTML test fileset into a single space-separated argument list -->
<pathconvert pathsep=" " property="testfiles" refid="html-test-files" />
<java classname="com.vaadin.testbench.util.TestConverter" classpathref="classpath" fork="true">
<sysproperty key="com.vaadin.testbench.test.retries" value="${retries}" />
<!-- Run the converter under a fixed en_US locale -->
<jvmarg value="-Duser.language=en"/>
<jvmarg value="-Duser.country=US"/>
<arg value="${test-output-dir}" />
<arg value="${browsers}" />
<arg line="${testfiles}" />
</java>
</target>
<!-- This target compiles the generated java junit tests. -->
<target name="compile-tests" depends="create-tests">
<mkdir dir="${class-dir}" />
<!-- failonerror="false": a source file that fails to compile must not abort
     the run; the tests that did compile are still executed. -->
<javac srcdir="${test-output-dir}" destdir="${class-dir}" debug="on" fork="yes" failonerror="false" encoding="UTF-8">
<classpath>
<path refid="classpath" />
</classpath>
</javac>
</target>
<!-- ================================================================== -->
<!-- Toolkit Server Management -->
<!-- ================================================================== -->
<!-- Starts the test server via build/testing/toolkit-server.py and records
     success in the 'server.start.succeeded' property, which gates the
     test-generation and test-execution targets. -->
<target name="server-start">
<fail unless="output-dir" message="The 'output-dir' (usually build/result/vaadin-xxx) should be given to test script." />
<fail unless="package.name" message="The 'package.name' property must be defined." />
<fail unless="package.filename" message="The 'package.filename' property must be defined." />
<fail unless="testing.testarea" message="The 'testing.testarea' property must be defined." />
<echo>Package name: ${package.name}</echo>
<echo>Package filename: ${package.filename}</echo>
<echo>Testing area: ${testing.testarea}</echo>
<exec executable="python" searchpath="true" dir=".." resultproperty="server.start.result">
<arg value="build/testing/toolkit-server.py" />
<arg value="start" />
<arg value="${package.name}" />
<arg value="${package.filename}" />
<arg value="${output-dir}" />
<arg value="${testing.testarea}" />
</exec>
<!-- Exit code 0 means the server started; set the flag other targets test -->
<condition property="server.start.succeeded">
<equals arg1="${server.start.result}" arg2="0" />
</condition>
</target>
<!-- Stops the test server started by the server-start target -->
<target name="server-stop">
<exec executable="python" dir=".." searchpath="true" resultproperty="server.stop.result">
<arg value="build/testing/toolkit-server.py" />
<arg value="stop" />
</exec>
</target>
<!-- ================================================================== -->
<!-- Running Tests -->
<!-- ================================================================== -->
<!-- Fail fast if the caller did not supply the mandatory configuration -->
<target name="check-parameters">
<fail unless="com.vaadin.testbench.lib.dir" message="The 'com.vaadin.testbench.lib.dir' property must be defined." />
<!-- NOTE(review): tester.host gets a default value in the configuration
     section above, so this check can never fire unless that default is
     removed. Kept for documentation value. -->
<fail unless="com.vaadin.testbench.tester.host" message="The 'com.vaadin.testbench.tester.host' property must be defined." />
<fail unless="com.vaadin.testbench.deployment.url" message="The 'com.vaadin.testbench.deployment.url' property must be defined." />
</target>
<!-- Iterate over every generated test file and run each via execute-tests -->
<target name="run-tests" depends="compile-tests" if="server.start.succeeded">
<!-- Long tests are prioritized to even out load at the end of the test run -->
<!-- If you are interested in certain tests they can temporarily be added here -->
<fileset dir="${test-output-dir}" id="priority-tests-fileset">
<include name="**/reindeer*.java" />
<include name="**/runo*.java" />
</fileset>
<fileset dir="${test-output-dir}" id="other-tests-fileset">
<include name="**/**.java" />
</fileset>
<!-- Ant-Contrib <for>: up to 50 concurrent iterations; keepgoing lets the
     remaining iterations continue when one test execution fails -->
<for threadCount="50" parallel="true" keepgoing="true" param="target">
<!-- A path element APPARENTLY removes duplicates and therefore this works. Could not find this documented anywhere... -->
<path>
<fileset refid="priority-tests-fileset" />
<fileset refid="other-tests-fileset" />
</path>
<sequential>
<antcall target="execute-tests">
<param name="target" value="@{target}" />
</antcall>
</sequential>
</for>
</target>
<!-- This target runs the generated and compiled junit tests -->
<!-- Invoked once per test file from run-tests via <antcall>; the file to run
     arrives in the 'target' property. -->
<target name="execute-tests">
<junit fork="yes" printsummary="withOutAndErr" maxmemory="96m">
<classpath>
<path refid="classpath" />
<pathelement path="${class-dir}" />
</classpath>
<!-- fork="yes" above is what lets these -D jvmargs reach the test JVM -->
<jvmarg value="-Dcom.vaadin.testbench.tester.host=${com.vaadin.testbench.tester.host}" />
<jvmarg value="-Dcom.vaadin.testbench.deployment.url=${com.vaadin.testbench.deployment.url}" />
<!-- Define where the reference screenshots and diff files are saved -->
<jvmarg value="-Dcom.vaadin.testbench.screenshot.directory=${com.vaadin.testbench.screenshot.directory}" />
<!-- Resolution for screenshots -->
<jvmarg value="-Dcom.vaadin.testbench.screenshot.resolution=${com.vaadin.testbench.screenshot.resolution}" />
<jvmarg value="-Dcom.vaadin.testbench.debug=${com.vaadin.testbench.debug}" />
<jvmarg value="-Dcom.vaadin.testbench.screenshot.block.error=${com.vaadin.testbench.screenshot.block.error}" />
<jvmarg value="-Djava.awt.headless=true" />
<!-- true/false system arguments -->
<!-- NOTE(review): softfail, reference.debug and cursor have no defaults in
     this file; if the caller leaves them unset the literal '${...}' text is
     passed through — confirm the test runner tolerates that. -->
<jvmarg value="-Dcom.vaadin.testbench.screenshot.softfail=${com.vaadin.testbench.screenshot.softfail}" />
<jvmarg value="-Dcom.vaadin.testbench.screenshot.reference.debug=${com.vaadin.testbench.screenshot.reference.debug}" />
<jvmarg value="-Dcom.vaadin.testbench.screenshot.cursor=${com.vaadin.testbench.screenshot.cursor}" />
<batchtest>
<filelist dir="${test-output-dir}" files="${target}" />
</batchtest>
</junit>
</target>
<!-- Delete the generated .java sources and compiled .class files from the
     temporary test output directory (a missing directory is not an error). -->
<target name="remove-temp-testclasses">
    <delete failonerror="false">
        <fileset dir="${test-output-dir}" includes="**/**.java,**/**.class" />
    </delete>
</target>
<!-- Ensure the error screenshot directory exists, then empty it so the run
     does not start with stale failure images. -->
<target name="remove-error-screens">
    <mkdir dir="${com.vaadin.testbench.screenshot.directory}/errors" />
    <delete>
        <fileset dir="${com.vaadin.testbench.screenshot.directory}/errors" includes="**/**.*" />
    </delete>
</target>
<!-- ================================================================== -->
<!-- Main Targets -->
<!-- ================================================================== -->
<!-- The default target: check parameters, clear old error screenshots, then
     compile and run the suite. Gated on 'server.start.succeeded', so the
     server must already have been started (see test-package). -->
<target name="run-and-clean-up" depends="check-parameters,remove-error-screens,run-tests" if="server.start.succeeded">
</target>
<!-- Full cycle: start the server, run everything, then stop the server. -->
<target name="test-package" depends="server-start, run-and-clean-up, server-stop">
</target>