# Copyright (C) 2001, 2004, 2005 Free Software Foundation, Inc.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

# This script was submitted by Janis Johnson <janis187@us.ibm.com>.

# Test the functionality and, optionally, the performance improvement of
# programs compiled with profile-directed optimizations.  Compile and
# run a test with profiling options, compile it again with options that
# use the profile feedback, and then run the test again.  Optionally,
# compile and run a third time without the profile-directed optimization
# and compare the timing results of the program with normal optimization
# and with the profile-directed optimization.  Each test is run using
# multiple sets of optimization and/or code generation options in
# addition to the profiling and feedback options.
#
# If perf_ext is defined and the performance value for the
# profile-directed test run is nonzero, then the performance check will
# be done.
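
# A minimal sketch of how an including .exp file might drive this script;
# the tool, options, extension, and file name below are illustrative
# placeholders, not taken from any particular harness:
#
#   load_lib profopt.exp
#   set tool gcc
#   set profile_option "-fprofile-generate"
#   set feedback_option "-fprofile-use"
#   set prof_ext "gcda"
#   foreach src [lsort [glob -nocomplain $srcdir/$subdir/*.c]] {
#       profopt-execute $src
#   }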

global PROFOPT_OPTIONS perf_delta

# The including .exp file must define these.
global tool profile_option feedback_option prof_ext
if ![info exists tool] {
    error "Tool is not specified."
}
if ![info exists prof_ext] {
    error "No profile data file extensions specified."
}

# The maximum performance degradation can be defined in the including file.
if ![info exists perf_delta] {

# The default option list can be overridden by
# PROFOPT_OPTIONS="{ { list1 } ... { list2 } }"
if ![info exists PROFOPT_OPTIONS] {
    set PROFOPT_OPTIONS [list \

set prof_option_list $PROFOPT_OPTIONS
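
# For example, the defaults above could be overridden from an including
# file or site.exp with something like (the option sets here are purely
# illustrative):
#
#   set PROFOPT_OPTIONS [list \
#       { -O2 } \
#       { -O3 -funroll-loops } ]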

# profopt-cleanup -- remove profiling or performance results files.
#
# TESTCASE is the name of the test.
# EXTLIST is a list of extensions of files to remove.
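#
# For example, this file itself calls it as
#   profopt-cleanup $testcase $perf_ext
# to remove a stale performance result file before a test runs.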
#
proc profopt-cleanup { testcase extlist } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    foreach ext $extlist {
        set files [glob -nocomplain $base.$ext]
        eval "remote_file build delete $files"

# profopt-perf-value -- get performance value for a test
#
# TESTCASE is the name of the test
# PERF_EXT is the extension of the performance result file
# OPTSTR is the string of compiler options
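#
# As a sketch of the format this proc expects (assumed from the parsing
# below, not from any particular output convention): the performance
# result file contains a line with the word TIME followed by an integer,
# e.g.
#   TIME 1234
# and a line of "TIME -1" means no consistent time was available.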
#
proc profopt-perf-value { testcase perf_ext optstr } {
    set basename [file tail $testcase]
    set base [file rootname $basename]
    set files [glob -nocomplain $base.$perf_ext]
    # The file doesn't exist; let the caller decide if that's a problem.

    remote_upload host $base.$perf_ext $base.$perf_ext
    set fd [open $base.$perf_ext r]

    if [regexp "TIME" $line] {
        if [regexp "TIME -1" $line] {
            fail "$testcase perf check: no consistent time available, $optstr"
        } elseif ![regexp "(\[0-9\]+)" "$line" val] {
            # Report problems with an existing file.
            fail "$testcase perf check: file $base.$perf_ext has wrong format, $optstr"

    profopt-cleanup $testcase $perf_ext

# dg-final-generate -- process code to run after the profile-generate step
#
# ARGS is the line number of the directive followed by the commands.
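#
# As an illustrative (hypothetical) use in a C testcase, a directive such as
#   /* { dg-final-generate { remote_file build delete "dump.tmp" } } */
# queues that TCL command to run after the profile-generate step, and
# dg-final-use below does the same for the profile-use step.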
#
proc dg-final-generate { args } {
    global generate_final_code

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
    }
    append generate_final_code "[lindex $args 1]\n"
}

# dg-final-use -- process code to run after the profile-use step
#
# ARGS is the line number of the directive followed by the commands.
#
proc dg-final-use { args } {
    global use_final_code

    if { [llength $args] > 2 } {
        error "[lindex $args 0]: too many arguments"
    }
    append use_final_code "[lindex $args 1]\n"
}

# profopt-final-code -- run final code
#
# WHICH is "generate" or "use".
# FINAL_CODE is the TCL code to run.
# NAME is the name of the test, for error messages.
#
proc profopt-final-code { which final_code name } {
    # This is copied from dg-test in dg.exp of DejaGnu.
    regsub -all "\\\\(\[{}\])" $final_code "\\1" final_code
    proc profopt-final-proc { args } $final_code
    if [catch "profopt-final-proc $name" errmsg] {
        perror "$name: error executing dg-final-${which}: $errmsg"
        unresolved "$name: Error executing dg-final-${which}: $errmsg"
    }
}

# profopt-get-options -- process test directives
#
# SRC is the full pathname of the testcase.
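#
# For example (hypothetical directive shown for illustration), a testcase
# containing
#   /* { dg-options "-DSOME_MACRO" } */
# causes this proc to return "-DSOME_MACRO", which profopt-execute then
# adds to the compile flags for both the profile and the feedback builds.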
#
proc profopt-get-options { src } {
    # dg-options sets a variable called dg-extra-tool-flags.
    set dg-extra-tool-flags ""

    # dg-require-* sets dg-do-what.
    upvar dg-do-what dg-do-what

    set tmp [dg-get-options $src]

        set cmd [lindex $op 0]
        if { ![string compare "dg-options" $cmd] \
             || ![string compare "dg-skip-if" $cmd] \
             || ![string compare "dg-final-generate" $cmd] \
             || ![string compare "dg-final-use" $cmd] \
             || [string match "dg-require-*" $cmd] } {
            set status [catch "$op" errmsg]
            if { $status != 0 } {
                perror "$src: $errmsg for \"$op\"\n"
                unresolved "$src: $errmsg for \"$op\""

            # Ignore unrecognized dg- commands, but warn about them.
            warning "profopt.exp does not support $cmd"

    # Return flags to use for compiling the primary source file and for
    # linking.
    return ${dg-extra-tool-flags}

# profopt-execute -- compile for profiling, then with feedback, then normally
#
# SRC is the full pathname of the testcase.
#
proc profopt-execute { src } {
    global prof_option_list
    global tool profile_option feedback_option prof_ext perf_ext perf_delta
    global generate_final_code use_final_code

    if ![info exists profile_option] {
        error "No profile option specified for first compile."
    }
    if ![info exists feedback_option] {
        error "No feedback option specified for second compile."
    }

    regsub "(?q)$srcdir/" $src "" testcase
    # If we couldn't rip $srcdir out of `src' then just do the best we can.
    # The point is to reduce the unnecessary noise in the logs.  Don't strip
    # out too much because different testcases with the same name can confuse
    # `test_failure'.
    if [string match "/*" $testcase] {
        set testcase "[file tail [file dirname $src]]/[file tail $src]"
    }

    set executable $tmpdir/[file tail [file rootname $src].x]
    set basename [file tail $testcase]
    set base [file rootname $basename]

    foreach option $prof_option_list {
        set execname1 "${executable}${count}1"
        set execname2 "${executable}${count}2"
        set execname3 "${executable}${count}3"
        remote_file build delete $execname1
        remote_file build delete $execname2
        remote_file build delete $execname3
        verbose "Testing $testcase, $option" 1

        # Remove old profiling and performance data files.
        foreach ext $prof_ext {
            remote_file target delete $tmpdir/$base.$ext
        }
        if [info exists perf_ext] {
            profopt-cleanup $testcase $perf_ext
        }

        # Process test directives.
        set generate_final_code ""
        set use_final_code ""
        set dg-do-what [list "run" "" P]
        set extra_flags [profopt-get-options $src]
        if { [lindex ${dg-do-what} 1 ] == "N" } {
            verbose "$src not supported on this target, skipping it" 3

        # Compile for profiling.
        lappend options "additional_flags=$option $extra_flags $profile_option"
        set optstr "$option $profile_option"
        set comp_output [${tool}_target_compile "$src" "$execname1" executable $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname1 $comp_output] {
            unresolved "$testcase execution, $optstr"
            unresolved "$testcase compilation, $option $feedback_option"
            unresolved "$testcase execution, $option $feedback_option"

        # Run the profiled test.
        set result [${tool}_load $execname1 "" ""]
        set status [lindex $result 0]

        # Make sure the profile data was generated, and fail if not.
        if { $status == "pass" } {
            foreach ext $prof_ext {
                remote_upload target $tmpdir/$base.$ext
                set files [glob -nocomplain $base.$ext]
                if { $files == "" } {
                    fail "$testcase execution: file $base.$ext does not exist, $option $profile_option"
            if { $missing_file == 0 } {
                $status "$testcase execution, $optstr"

        # If there is dg-final code to execute for the generate step, do it
        # even if it failed; it might clean up temporary files.
        if ![string match $generate_final_code ""] {
            profopt-final-code "generate" $generate_final_code $testcase
        }
        remote_file build delete $execname1

        # Quit this round if it failed.
        if { $status != "pass" } {
            unresolved "$testcase compilation, $option $feedback_option"
            unresolved "$testcase execution, $option $feedback_option"

        # Compile with feedback-directed optimizations.
        lappend options "additional_flags=$option $extra_flags $feedback_option"
        set optstr "$option $feedback_option"
        set comp_output [${tool}_target_compile "$src" "$execname2" "executable" $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname2 $comp_output] {
            unresolved "$testcase execution, $optstr"

        # Run the profile-directed optimized test.
        set result [${tool}_load "$execname2" "" ""]
        set status [lindex $result 0]
        $status "$testcase execution, $optstr"

        # If there is dg-final code to execute for the use step, do it.
        if ![string match $use_final_code ""] {
            profopt-final-code "use" $use_final_code $testcase
        }

        # Remove the profiling data files.
        foreach ext $prof_ext {
            remote_file target delete $tmpdir/$base.$ext
        }

        if { $status != "pass" } {

        # If the test is not expected to produce performance data then
        # we are done.
        if ![info exists perf_ext] {
            remote_file build delete $execname2

        # Get the performance data from the test built with
        # profile-directed optimization.  If the file doesn't exist or if
        # the value is zero, skip the performance comparison.
        set val2 [profopt-perf-value $testcase $perf_ext $optstr]

        remote_file build delete $execname2

        # Compile with normal optimizations.
        lappend options "additional_flags=$option"
        set comp_output [${tool}_target_compile "$src" "$execname3" "executable" $options]
        if ![${tool}_check_compile "$testcase compilation" $optstr $execname3 $comp_output] {
            unresolved "$testcase execution, $optstr"
            unresolved "$testcase perf check, $optstr"

        # Run the test with normal optimizations.
        set result [${tool}_load "$execname3" "" ""]
        set status [lindex $result 0]
        $status "$testcase execution, $optstr"
        if { $status != "pass" } {
            unresolved "$testcase perf check, $optstr"

        # Get the performance data from the test built with normal
        # optimization.
        set val1 [profopt-perf-value $testcase $perf_ext $optstr]

            # The data file existed with the profile-directed
            # optimization so this one should, too.
            fail "$testcase perf check: file $base.$perf_ext does not exist, $optstr"

        # Compare results of the two runs and fail if the time with the
        # profile-directed optimization is significantly more than the time
        # with normal optimization.
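        # For example, assuming an illustrative perf_delta of 4 (percent):
        # with val1 = 1000 and val2 = 1050 the degradation of 50 exceeds
        # the allowed 1000 * 4 / 100 = 40 and the check fails, while
        # val2 = 1030 would still pass.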
        if { $val2 > $val1 } {
            # Check for a performance degradation outside of allowable limits.
            if { [expr $val2 - $val1] > [expr [expr $val1 * $perf_delta] / 100] } {

        if { $status == "fail" } {
            fail "$testcase perf check: orig: $val1 new: $val2, $optstr"
        } else {
            $status "$testcase perf check, $optstr"
            verbose "$testcase orig: $val1 new: $val2, $optstr" 2
            remote_file build delete $execname2
            remote_file build delete $execname3