#!/bin/bash
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# The optimization code is based on pngslim (http://goo.gl/a0XHg)
# and executes a similar pipeline to optimize the png file size.
# The steps that require pngoptimizercl/pngrewrite/deflopt are omitted,
# but this runs all other processes, including:
# 1) various color-dependent optimizations using optipng.
# 2) optimize the number of huffman blocks.
# 3) randomize the huffman table.
# 4) further optimize using optipng and advdef (zlib stream).
# Due to step 3), each run may produce slightly different results.

# Note(oshima): In my experiment, advdef didn't reduce much. I'm keeping it
# for now as it does not take much time to run.

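# Example invocation (illustrative; run from the Chromium src directory, as
# required by the .gclient check below):
#   build/sanitize-png-files.sh -o2 ash/resources
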
readonly ALL_DIRS="
ash/resources
ui/resources
chrome/app/theme
chrome/browser/resources
chrome/renderer/resources
webkit/glue/resources
remoting/resources
remoting/webapp
"

# Files larger than this size (in bytes) will
# use the optimization parameters tailored for large files.
LARGE_FILE_THRESHOLD=3000

# Constants used for optimization
readonly DEFAULT_MIN_BLOCK_SIZE=128
readonly DEFAULT_LIMIT_BLOCKS=256
readonly DEFAULT_RANDOM_TRIALS=100
# Taken from the recommendation in pngslim's readme.txt.
readonly LARGE_MIN_BLOCK_SIZE=1
readonly LARGE_LIMIT_BLOCKS=2
readonly LARGE_RANDOM_TRIALS=1

# Global variables for stats
TOTAL_OLD_BYTES=0
TOTAL_NEW_BYTES=0
TOTAL_FILE=0
PROCESSED_FILE=0

declare -a THROBBER_STR=('-' '\\' '|' '/')
THROBBER_COUNT=0

# Show throbber character at current cursor position.
function throbber {
  echo -ne "${THROBBER_STR[$THROBBER_COUNT]}\b"
  let THROBBER_COUNT=($THROBBER_COUNT+1)%4
}

# Usage: pngout_loop <file> <png_out_options> ...
# Optimize the png file using pngout with the given options
# using various block split thresholds and filter types.
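# (At level 1 this makes 6 pngout runs, one per filter 0-5; at level 2 it
# makes 4 block split thresholds x 6 filters = 24 runs per call.)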
function pngout_loop {
  local file=$1
  shift
  local opts=$*
  if [ $OPTIMIZE_LEVEL == 1 ]; then
    for j in $(seq 0 5); do
      throbber
      pngout -q -k1 -s1 -f$j $opts $file
    done
  else
    for i in 0 128 256 512; do
      for j in $(seq 0 5); do
        throbber
        pngout -q -k1 -s1 -b$i -f$j $opts $file
      done
    done
  fi
}

# Usage: get_color_depth_list
# Returns the list of color depth options for the current optimization level.
function get_color_depth_list {
  if [ $OPTIMIZE_LEVEL == 1 ]; then
    echo "-d0"
  else
    echo "-d1 -d2 -d4 -d8"
  fi
}

# Usage: process_grayscale <file>
# Optimize grayscale images for all color bit depths.

# TODO(oshima): Experiment with -d0 w/o -c0.
function process_grayscale {
  echo -n "|gray"
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c0 $opt
  done
}

# Usage: process_grayscale_alpha <file>
# Optimize grayscale images with alpha for all color bit depths.
function process_grayscale_alpha {
  echo -n "|gray-a"
  pngout_loop $file -c4
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
  done
}

# Usage: process_rgb <file>
# Optimize rgb images with or without alpha for all color bit depths.
function process_rgb {
  echo -n "|rgb"
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
  done
  pngout_loop $file -c2
  pngout_loop $file -c6
}

# Usage: huffman_blocks <file>
# Optimize the huffman blocks.
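# (For example, a 2560-byte file stays on the defaults and tries block counts
# 2 through 2560/128 = 20; a file over LARGE_FILE_THRESHOLD drops to a min
# block size of 1 but is capped at LARGE_LIMIT_BLOCKS = 2 blocks.)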
function huffman_blocks {
  local file=$1
  echo -n "|huffman"
  local size=$(stat -c%s $file)
  local min_block_size=$DEFAULT_MIN_BLOCK_SIZE
  local limit_blocks=$DEFAULT_LIMIT_BLOCKS

  if [ $size -gt $LARGE_FILE_THRESHOLD ]; then
    min_block_size=$LARGE_MIN_BLOCK_SIZE
    limit_blocks=$LARGE_LIMIT_BLOCKS
  fi
  let max_blocks=$size/$min_block_size
  if [ $max_blocks -gt $limit_blocks ]; then
    max_blocks=$limit_blocks
  fi

  for i in $(seq 2 $max_blocks); do
    throbber
    pngout -q -k1 -ks -s1 -n$i $file
  done
}

# Usage: random_huffman_table_trial <file>
# Try compressing by randomizing the initial huffman table.

# TODO(oshima): Try adjusting different parameters for large files to
# reduce runtime.
function random_huffman_table_trial {
  echo -n "|random"
  local file=$1
  local old_size=$(stat -c%s $file)
  local trials_count=$DEFAULT_RANDOM_TRIALS

  if [ $old_size -gt $LARGE_FILE_THRESHOLD ]; then
    trials_count=$LARGE_RANDOM_TRIALS
  fi
  for i in $(seq 1 $trials_count); do
    throbber
    pngout -q -k1 -ks -s0 -r $file
  done
  local new_size=$(stat -c%s $file)
  if [ $new_size -lt $old_size ]; then
    random_huffman_table_trial $file
  fi
}

# Usage: final_compression <file>
# Further compress using optipng and advdef.
# TODO(oshima): Experiment with 256.
function final_compression {
  echo -n "|final"
  local file=$1
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    for i in 32k 16k 8k 4k 2k 1k 512; do
      throbber
      optipng -q -nb -nc -zw$i -zc1-9 -zm1-9 -zs0-3 -f0-5 $file
    done
  fi
  for i in $(seq 1 4); do
    throbber
    advdef -q -z -$i $file
  done
  echo -ne "\r"
}

# Usage: get_color_type <file>
# Returns the color type name of the png file. Here is the list of names
# for each color type code:
# 0 : grayscale
# 2 : RGB
# 3 : colormap
# 4 : gray+alpha
# 6 : RGBA
# See http://en.wikipedia.org/wiki/Portable_Network_Graphics#Color_depth
# for details about the color type codes.
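# For reference (typical `file` output; the exact wording can vary with the
# libmagic version):
#   icon.png: PNG image data, 32 x 32, 8-bit/color RGBA, non-interlaced
# The third comma-separated field is " 8-bit/color RGBA", so its second word,
# "RGBA", is what gets echoed below.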
function get_color_type {
  local file=$1
  echo $(file $file | awk -F, '{print $3}' | awk '{print $2}')
}

# Usage: optimize_size <file>
# Performs png file optimization.
function optimize_size {
  tput el
  local file=$1
  echo -n "$file "

  advdef -q -z -4 $file

  pngout -q -s4 -c0 -force $file $file.tmp.png
  if [ -f $file.tmp.png ]; then
    rm $file.tmp.png
    process_grayscale $file
    process_grayscale_alpha $file
  else
    pngout -q -s4 -c4 -force $file $file.tmp.png
    if [ -f $file.tmp.png ]; then
      rm $file.tmp.png
      process_grayscale_alpha $file
    else
      process_rgb $file
    fi
  fi

  echo -n "|filter"
  local old_color_type=$(get_color_type $file)
  optipng -q -zc9 -zm8 -zs0-3 -f0-5 $file -out $file.tmp.png
  local new_color_type=$(get_color_type $file.tmp.png)
  # optipng may corrupt a png file when reducing the color type
  # to grayscale/grayscale+alpha. Just skip such cases until
  # the bug is fixed. See crbug.com/174505, crbug.com/174084.
  # The issue is reported in
  # https://sourceforge.net/tracker/?func=detail&aid=3603630&group_id=151404&atid=780913
  if [[ $old_color_type == "RGBA" && $new_color_type =~ gray.* ]] ; then
    rm $file.tmp.png
    echo -n "[skip opting]"
  else
    mv $file.tmp.png $file
  fi
  pngout -q -k1 -s1 $file

  huffman_blocks $file

  # TODO(oshima): Experiment with strategy 1.
  echo -n "|strategy"
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    for i in 3 2 0; do
      pngout -q -k1 -ks -s$i $file
    done
  else
    pngout -q -k1 -ks -s1 $file
  fi

  if [ $OPTIMIZE_LEVEL == 2 ]; then
    random_huffman_table_trial $file
  fi

  final_compression $file
}

# Usage: process_file <file>
function process_file {
  local file=$1
  local name=$(basename $file)
  # -rem alla removes all ancillary chunks except for tRNS
  pngcrush -d $TMP_DIR -brute -reduce -rem alla $file > /dev/null

  if [ $OPTIMIZE_LEVEL != 0 ]; then
    optimize_size $TMP_DIR/$name
  fi
}

# Usage: sanitize_file <file>
function sanitize_file {
  local file=$1
  local name=$(basename $file)
  local old=$(stat -c%s $file)
  local tmp_file=$TMP_DIR/$name

  process_file $file

  local new=$(stat -c%s $tmp_file)
  let diff=$old-$new
  let percent=($diff*100)/$old
  let TOTAL_FILE+=1

  tput el
  if [ $new -lt $old ]; then
    echo -ne "$file : $old => $new ($diff bytes : $percent %)\n"
    mv "$tmp_file" "$file"
    let TOTAL_OLD_BYTES+=$old
    let TOTAL_NEW_BYTES+=$new
    let PROCESSED_FILE+=1
  else
    if [ $OPTIMIZE_LEVEL == 0 ]; then
      echo -ne "$file : skipped\r"
    fi
    rm $tmp_file
  fi
}

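# A shrunk file is reported on its own line, e.g. (illustrative numbers):
#   ash/resources/foo.png : 2048 => 1536 (512 bytes : 25 %)
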
function sanitize_dir {
  local dir=$1
  for f in $(find $dir -name "*.png"); do
    if $using_cygwin ; then
      sanitize_file $(cygpath -w $f)
    else
      sanitize_file $f
    fi
  done
}

function install_if_not_installed {
  local program=$1
  local package=$2
  which $program > /dev/null 2>&1
  if [ "$?" != "0" ]; then
    if $using_cygwin ; then
      echo "Couldn't find $program. Please run setup.exe and install the $package package."
      exit 1
    else
      read -p "Couldn't find $program. Do you want to install? (y/n)"
      [ "$REPLY" == "y" ] && sudo apt-get install $package
      [ "$REPLY" == "y" ] || exit
    fi
  fi
}

function fail_if_not_installed {
  local program=$1
  local url=$2
  which $program > /dev/null 2>&1
  if [ $? != 0 ]; then
    echo "Couldn't find $program. Please download and install it from $url ."
    exit 1
  fi
}

function show_help {
  local program=$(basename $0)
  echo \
"Usage: $program [options] dir ...

$program is a utility to reduce the size of png files by removing
unnecessary chunks and compressing the image.

Options:
  -o<optimize_level>  Specify the optimization level (default is 1):
     0  Just run pngcrush. It removes unnecessary chunks and performs basic
        optimization on the encoded data.
     1  Optimize png files using pngout/optipng and advdef. This can reduce
        the size by an additional 5~30%. This is the default level.
     2  Aggressively optimize the size of png files. This may produce an
        additional 1~5% reduction. Warning: this is *VERY*
        slow and can take hours to process all files.
  -h  Print this help text."
  exit 1
}

if [ ! -e ../.gclient ]; then
  echo "$0 must be run in src directory"
  exit 1
fi

if [ "$(expr substr $(uname -s) 1 6)" == "CYGWIN" ]; then
  using_cygwin=true
else
  using_cygwin=false
fi

OPTIMIZE_LEVEL=1
# Parse options. The parsed -o option (and its value) is shifted off so that
# only the directory arguments remain in $@ for DIRS below.
while getopts o:h opts
do
  case $opts in
    o)
      if [[ ! "$OPTARG" =~ [012] ]]; then
        show_help
      fi
      OPTIMIZE_LEVEL=$OPTARG
      [ "$1" == "-o" ] && shift
      shift;;
    [h?])
      show_help;;
  esac
done

# Make sure we have all necessary commands installed.
install_if_not_installed pngcrush pngcrush
if [ $OPTIMIZE_LEVEL == 2 ]; then
  install_if_not_installed optipng optipng
fi
if $using_cygwin ; then
  fail_if_not_installed advdef "http://advancemame.sourceforge.net/comp-readme.html"
else
  install_if_not_installed advdef advancecomp
fi

if $using_cygwin ; then
  pngout_url="http://www.advsys.net/ken/utils.htm"
else
  pngout_url="http://www.jonof.id.au/kenutils"
fi
fail_if_not_installed pngout $pngout_url

# Create a tmp directory for the crushed png files.
TMP_DIR=$(mktemp -d)
if $using_cygwin ; then
  TMP_DIR=$(cygpath -w $TMP_DIR)
fi

# Make sure we clean up the temp dir on exit.
trap "rm -rf $TMP_DIR" EXIT

# If no directories are specified, sanitize all directories.
DIRS=$@
set ${DIRS:=$ALL_DIRS}

echo "Optimize level=$OPTIMIZE_LEVEL"
for d in $DIRS; do
  if $using_cygwin ; then
    d=$(cygpath -w $d)
  fi
  echo "Sanitizing png files in $d"
  sanitize_dir $d
  echo
done

# Print the results.
if [ $PROCESSED_FILE == 0 ]; then
  echo "Did not find any files (out of $TOTAL_FILE files)" \
       "that could be optimized" \
       "in $(date -u -d @$SECONDS +%T)s"
else
  let diff=$TOTAL_OLD_BYTES-$TOTAL_NEW_BYTES
  let percent=$diff*100/$TOTAL_OLD_BYTES
  echo "Processed $PROCESSED_FILE files (out of $TOTAL_FILE files)" \
       "in $(date -u -d @$SECONDS +%T)s"
  echo "Result : $TOTAL_OLD_BYTES => $TOTAL_NEW_BYTES bytes" \
       "($diff bytes : $percent %)"
fi