# Copyright (C) 2001, 2004, 2005 Free Software Foundation, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# This script was submitted by Janis Johnson.

# Test the functionality and optionally, performance improvement, of
# programs compiled with profile-directed optimizations.  Compile and
# run a test with profile options, compile it with options using the
# profile feedback, and then run the test again.  Optionally compile
# and run a third time without the profile-directed optimization and
# compare timing results of the program with normal optimization and
# with the profile-directed optimization.  Each test is run using
# multiple sets of optimization and/or code generation options in
# addition to the profiling and feedback options.

# If perf_ext is defined and the performance value for the
# profile-directed test run is nonzero then the performance check will
# be done.
load_lib dg.exp
load_lib gcc-dg.exp

global PROFOPT_OPTIONS perf_delta

# The including .exp file must define these.
global tool profile_option feedback_option prof_ext
if ![info exists tool] {
    error "Tool is not specified."
}
if ![info exists prof_ext] {
    error "No profile data file extensions specified."
}

# The maximum performance degradation can be defined in the including file.
if ![info exists perf_delta] {
    set perf_delta 4
}

# The default option list can be overridden by
# PROFOPT_OPTIONS="{ { list1 } ... { list2 } }"

if ![info exists PROFOPT_OPTIONS] {
    set PROFOPT_OPTIONS [list \
	{ -g } \
	{ -O0 } \
	{ -O1 } \
	{ -O2 } \
	{ -O3 } \
	{ -O3 -g } \
	{ -Os } ]
}

set prof_option_list $PROFOPT_OPTIONS
#
# profopt-cleanup -- remove profiling or performance results files.
#
# TESTCASE is the name of the test.
# EXTLIST is the list of file extensions to remove.
#
proc profopt-cleanup { testcase extlist } {
    # Work from the testcase's bare name, with directory and extension removed.
    set stem [file rootname [file tail $testcase]]
    foreach suffix $extlist {
	set leftovers [glob -nocomplain $stem.$suffix]
	if { $leftovers != "" } {
	    eval "remote_file build delete $leftovers"
	}
    }
}
|
84 |
|
|
|
85 |
|
|
#
# profopt-perf-value -- get performance value for a test
#
# TESTCASE is the name of the test
# PERF_EXT is the extension of the performance result file
# OPTSTR is the string of compiler options
#
# Returns the time read from the result file, -1 if the file reports no
# consistent time, or -2 if the file is absent or malformed.
#
proc profopt-perf-value { testcase perf_ext optstr } {
    set stem [file rootname [file tail $testcase]]
    set matches [glob -nocomplain $stem.$perf_ext]
    # The file doesn't exist; let the caller decide if that's a problem.
    if { $matches == "" } {
	return -2
    }
    remote_upload host $stem.$perf_ext $stem.$perf_ext
    set chan [open $stem.$perf_ext r]
    gets $chan firstline
    set result -2
    if { [regexp "TIME" $firstline] } {
	if { [regexp "TIME -1" $firstline] } {
	    fail "$testcase perf check: no consistent time available, $optstr"
	    set result -1
	} elseif { ![regexp "(\[0-9\]+)" "$firstline" result] } {
	    set result -2
	}
    }
    # Report problems with an existing file.
    if { $result == -2 } {
	fail "$testcase perf check: file $stem.$perf_ext has wrong format, $optstr"
    }
    close $chan
    profopt-cleanup $testcase $perf_ext
    return $result
}
|
120 |
|
|
|
121 |
|
|
#
# dg-final-generate -- process code to run after the profile-generate step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-generate { args } {
    global generate_final_code

    # A directive carries its line number plus at most one command argument.
    if { [llength $args] <= 2 } {
	append generate_final_code "[lindex $args 1]\n"
    } else {
	error "[lindex $args 0]: too many arguments"
    }
}
|
135 |
|
|
|
136 |
|
|
#
# dg-final-use -- process code to run after the profile-use step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use { args } {
    global use_final_code

    # A directive carries its line number plus at most one command argument.
    if { [llength $args] <= 2 } {
	append use_final_code "[lindex $args 1]\n"
    } else {
	error "[lindex $args 0]: too many arguments"
    }
}
|
150 |
|
|
|
151 |
|
|
#
# profopt-final-code -- run final code
#
# WHICH is "generate" or "use".
# FINAL_CODE is the TCL code to run.
# NAME is the name of the test, for error messages.
#
proc profopt-final-code { which final_code name } {
    # Strip escaped braces; this is copied from dg-test in dg.exp of DejaGnu.
    regsub -all "\\\\(\[{}\])" $final_code "\\1" final_code
    # Wrap the directive's code in a throwaway proc so errors are contained.
    proc profopt-final-proc { args } $final_code
    if { [catch "profopt-final-proc $name" errmsg] } {
	perror "$name: error executing dg-final-${which}: $errmsg"
	unresolved "$name: Error executing dg-final-${which}: $errmsg"
    }
}
|
167 |
|
|
|
168 |
|
|
#
# profopt-get-options -- process test directives
#
# SRC is the full pathname of the testcase.
#
# Returns the dg-options flags to use for compiling the primary source
# file and for linking.
#
proc profopt-get-options { src } {
    # dg-options sets a variable called dg-extra-tool-flags.
    set dg-extra-tool-flags ""

    # dg-require-* sets dg-do-what.
    upvar dg-do-what dg-do-what

    set tmp [dg-get-options $src]
    foreach op $tmp {
	set cmd [lindex $op 0]
	if { ![string compare "dg-options" $cmd] \
	     || ![string compare "dg-skip-if" $cmd] \
	     || ![string compare "dg-final-generate" $cmd] \
	     || ![string compare "dg-final-use" $cmd] \
	     || [string match "dg-require-*" $cmd] } {
	    set status [catch "$op" errmsg]
	    if { $status != 0 } {
		# Fixed: message previously printed the literal "src:"
		# instead of the testcase name.
		perror "$src: $errmsg for \"$op\"\n"
		unresolved "$src: $errmsg for \"$op\""
		return
	    }
	} else {
	    # Ignore unrecognized dg- commands, but warn about them.
	    # Fixed: the warning previously named compat.exp, a leftover
	    # from the driver this code was copied from.
	    warning "profopt.exp does not support $cmd"
	}
    }

    # Return flags to use for compiling the primary source file and for
    # linking.
    return ${dg-extra-tool-flags}
}
|
204 |
|
|
|
205 |
|
|
#
# profopt-execute -- compile for profiling and then feedback, then normal
#
# (Header previously named this "c-prof-execute"; corrected to match the
# actual proc name.)
#
# SRC is the full pathname of the testcase.
#
# For each entry in prof_option_list: build and run SRC with profiling
# instrumentation, verify the profile data files appear, rebuild and run
# with feedback-directed optimization, and -- when perf_ext is defined and
# a nonzero time was recorded -- build and run once more without feedback
# and compare the two times against perf_delta.
#
proc profopt-execute { src } {
    global srcdir tmpdir
    global prof_option_list
    global tool profile_option feedback_option prof_ext perf_ext perf_delta
    global generate_final_code use_final_code
    global verbose

    if ![info exists profile_option] {
	error "No profile option specified for first compile."
    }
    if ![info exists feedback_option] {
	error "No feedback option specified for second compile."
    }

    regsub "^$srcdir/?" $src "" testcase
    # If we couldn't rip $srcdir out of `src' then just do the best we can.
    # The point is to reduce the unnecessary noise in the logs.  Don't strip
    # out too much because different testcases with the same name can confuse
    # `test-tool'.
    if [string match "/*" $testcase] {
	set testcase "[file tail [file dirname $src]]/[file tail $src]"
    }

    set executable $tmpdir/[file tail [file rootname $src].x]
    set basename [file tail $testcase]
    set base [file rootname $basename]

    set count 0
    foreach option $prof_option_list {
	# Three executables per option set: instrumented, feedback-directed,
	# and plain-optimized (for the performance comparison).
	set execname1 "${executable}${count}1"
	set execname2 "${executable}${count}2"
	set execname3 "${executable}${count}3"
	incr count

	remote_file build delete $execname1
	remote_file build delete $execname2
	remote_file build delete $execname3
	verbose "Testing $testcase, $option" 1

	# Remove old profiling and performance data files.
	foreach ext $prof_ext {
	    remote_file target delete $tmpdir/$base.$ext
	}
	if [info exists perf_ext] {
	    profopt-cleanup $testcase $perf_ext
	}

	# Process test directives.

	set generate_final_code ""
	set use_final_code ""
	set dg-do-what [list "run" "" P]
	set extra_flags [profopt-get-options $src]
	if { [lindex ${dg-do-what} 1 ] == "N" } {
	    unsupported "$src"
	    verbose "$src not supported on this target, skipping it" 3
	    return
	}

	# Compile for profiling.

	set options ""
	lappend options "additional_flags=$option $extra_flags $profile_option"
	set optstr "$option $profile_option"
	set comp_output [${tool}_target_compile "$src" "$execname1" executable $options]
	if ![${tool}_check_compile "$testcase compilation" $optstr $execname1 $comp_output] {
	    # Record the later steps as unresolved; they depend on this one.
	    unresolved "$testcase execution, $optstr"
	    unresolved "$testcase compilation, $option $feedback_option"
	    unresolved "$testcase execution, $option $feedback_option"
	    continue
	}

	# Run the profiled test.

	set result [${tool}_load $execname1 "" ""]
	set status [lindex $result 0]
	set missing_file 0
	# Make sure the profile data was generated, and fail if not.
	if { $status == "pass" } {
	    foreach ext $prof_ext {
		remote_upload target $tmpdir/$base.$ext
		set files [glob -nocomplain $base.$ext]
		if { $files == "" } {
		    set status "fail"
		    set missing_file 1
		    fail "$testcase execution: file $base.$ext does not exist, $option $profile_option"
		}
	    }
	}
	if { $missing_file == 0 } {
	    $status "$testcase execution, $optstr"
	}

	# If there is dg-final code to execute for the generate step, do it
	# even if it failed; it might clean up temporary files.
	if ![string match $generate_final_code ""] {
	    profopt-final-code "generate" $generate_final_code $testcase
	}

	remote_file build delete $execname1

	# Quit for this round if it failed
	if { $status != "pass" } {
	    unresolved "$testcase compilation, $option $feedback_option"
	    unresolved "$testcase execution, $option $feedback_option"
	    continue
	}

	# Compile with feedback-directed optimizations.

	set options ""
	lappend options "additional_flags=$option $extra_flags $feedback_option"
	set optstr "$option $feedback_option"
	set comp_output [${tool}_target_compile "$src" "$execname2" "executable" $options]
	if ![${tool}_check_compile "$testcase compilation" $optstr $execname2 $comp_output] {
	    unresolved "$testcase execution, $optstr"
	    continue
	}

	# Run the profile-directed optimized test.

	set result [${tool}_load "$execname2" "" ""]
	set status [lindex $result 0]
	$status "$testcase execution, $optstr"

	# If there is dg-final code to execute for the use step, do it.
	if ![string match $use_final_code ""] {
	    profopt-final-code "use" $use_final_code $testcase
	}

	# Remove the profiling data files.
	foreach ext $prof_ext {
	    remote_file target delete $tmpdir/$base.$ext
	}

	if { $status != "pass" } {
	    continue
	}

	# If the test is not expected to produce performance data then
	# we're done now.
	if ![info exists perf_ext] {
	    remote_file build delete $execname2
	    continue
	}

	# Get the performance data from the test built with
	# profile-directed optimization.  If the file doesn't exist or if
	# the value is zero, skip the performance comparison.
	set val2 [profopt-perf-value $testcase $perf_ext $optstr]
	if { $val2 <= 0 } {
	    remote_file build delete $execname2
	    continue
	}

	# Compile with normal optimizations.

	set options ""
	lappend options "additional_flags=$option"
	set optstr "$option"
	set comp_output [${tool}_target_compile "$src" "$execname3" "executable" $options]
	if ![${tool}_check_compile "$testcase compilation" $optstr $execname3 $comp_output] {
	    unresolved "$testcase execution, $optstr"
	    unresolved "$testcase perf check, $optstr"
	    continue
	}

	# Run the test with normal optimizations.

	set result [${tool}_load "$execname3" "" ""]
	set status [lindex $result 0]
	$status "$testcase execution, $optstr"
	if { $status != "pass" } {
	    unresolved "$testcase perf check, $optstr"
	    continue
	}

	# Get the performance data from the test built with normal
	# optimization.
	set val1 [profopt-perf-value $testcase $perf_ext $optstr]
	if { $val1 < 0 } {
	    if { $val1 == -2 } {
		# The data file existed with the profile-directed
		# optimization so this one should, too.
		fail "$testcase perf check: file $base.$perf_ext does not exist, $optstr"
	    }
	    continue
	}

	# Compare results of the two runs and fail if the time with the
	# profile-directed optimization is significantly more than the time
	# without it.
	set status "pass"
	if { $val2 > $val1 } {
	    # Check for a performance degradation outside of allowable limits.
	    if { [expr $val2 - $val1] > [expr [expr $val1 * $perf_delta] / 100] } {
		set status "fail"
	    }
	}
	if { $status == "fail" } {
	    fail "$testcase perf check: orig: $val1 new: $val2, $optstr"
	} else {
	    $status "$testcase perf check, $optstr"
	    verbose "$testcase orig: $val1 new: $val2, $optstr" 2
	    # Keep the executables around on failure for manual inspection.
	    remote_file build delete $execname2
	    remote_file build delete $execname3
	}
    }
}
|