# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# The optimization code is based on pngslim (http://goo.gl/a0XHg)
# and executes a similar pipeline to optimize the png file size.
# The steps that require pngoptimizercl/pngrewrite/deflopt are omitted,
# but this runs all other processes, including:
# 1) various color-dependent optimizations using optipng.
# 2) optimize the number of huffman blocks.
# 3) randomize the huffman table.
# 4) further optimize using optipng and advdef (zlib stream).
# Due to step 3), each run may produce slightly different results.
# Note(oshima): In my experiment, advdef didn't reduce much. I'm keeping it
# for now as it does not take much time to run.
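
# Default resource directories to scan when no <dir> arguments are given
# on the command line.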
readonly ALL_DIRS="
chrome/browser/resources
chrome/renderer/resources
content/public/android/java/res
content/renderer/resources
content/shell/resources
ui/webui/resources/images
win8/metro_driver/resources
"
# Files larger than this size (in bytes) will use the optimization
# parameters tailored for large files.
LARGE_FILE_THRESHOLD=3000
# Constants used for optimization
readonly DEFAULT_MIN_BLOCK_SIZE=128
readonly DEFAULT_LIMIT_BLOCKS=256
readonly DEFAULT_RANDOM_TRIALS=100
# Taken from the recommendation in pngslim's readme.txt.
readonly LARGE_MIN_BLOCK_SIZE=1
readonly LARGE_LIMIT_BLOCKS=2
readonly LARGE_RANDOM_TRIALS=1
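
# Note: for files above LARGE_FILE_THRESHOLD, LARGE_MIN_BLOCK_SIZE=1 and
# LARGE_LIMIT_BLOCKS=2 mean that huffman_blocks only tries the two-block
# split, and LARGE_RANDOM_TRIALS=1 limits each random_huffman_table_trial
# pass to a single trial, which keeps the run time for large files bounded.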
# Global variables for stats and the progress throbber.
declare -a THROBBER_STR=('-' '\\' '|' '/')
THROBBER_COUNT=0
# Echo only if verbose option is set.
if [ $VERBOSE -eq 1 ]; then

# Show throbber character at current cursor position.
info -ne "${THROBBER_STR[$THROBBER_COUNT]}\b"
let THROBBER_COUNT=($THROBBER_COUNT+1)%4
# Usage: pngout_loop <file> <png_out_options> ...
# Optimize the png file using pngout with the given options
# using various block split thresholds and filter types.
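# At optimization level 1 only the default block split threshold is tried;
# otherwise the block split threshold (-b) is also swept over 0, 128, 256
# and 512 in addition to the six filter types (-f0..-f5).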
function pngout_loop {
  if [ $OPTIMIZE_LEVEL == 1 ]; then
    for j in $(seq 0 5); do
      pngout -q -k1 -s1 -f$j $opts $file
  for i in 0 128 256 512; do
    for j in $(seq 0 5); do
      pngout -q -k1 -s1 -b$i -f$j $opts $file
# Usage: get_color_depth_list
# Returns the list of color depth options for the current optimization level.
function get_color_depth_list {
  if [ $OPTIMIZE_LEVEL == 1 ]; then
    echo "-d1 -d2 -d4 -d8"
# Usage: process_grayscale <file>
# Optimize grayscale images for all color bit depths.
# TODO(oshima): Experiment with -d0 w/o -c0.
function process_grayscale {
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c0 $opt
# Usage: process_grayscale_alpha <file>
# Optimize grayscale images with alpha for all color bit depths.
function process_grayscale_alpha {
  pngout_loop $file -c4
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
# Usage: process_rgb <file>
# Optimize rgb images with or without alpha for all color bit depths.
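# The -c<n> values passed to pngout_loop below follow the PNG color type
# codes: 0 = grayscale, 2 = RGB, 3 = palette, 4 = grayscale+alpha,
# 6 = RGB+alpha.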
function process_rgb {
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
  pngout_loop $file -c2
  pngout_loop $file -c6
# Usage: huffman_blocks <file>
# Optimize the huffman blocks.
function huffman_blocks {
  local size=$(stat -c%s $file)
  local min_block_size=$DEFAULT_MIN_BLOCK_SIZE
  local limit_blocks=$DEFAULT_LIMIT_BLOCKS

  if [ $size -gt $LARGE_FILE_THRESHOLD ]; then
    min_block_size=$LARGE_MIN_BLOCK_SIZE
    limit_blocks=$LARGE_LIMIT_BLOCKS
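  # Allow at most one huffman block per min_block_size bytes, capped at
  # limit_blocks; the loop below then tries every block count (-n<i>) from 2
  # up to that maximum.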
  let max_blocks=$size/$min_block_size
  if [ $max_blocks -gt $limit_blocks ]; then
    max_blocks=$limit_blocks

  for i in $(seq 2 $max_blocks); do
    pngout -q -k1 -ks -s1 -n$i $file
# Usage: random_huffman_table_trial <file>
# Try compressing by randomizing the initial huffman table.
# TODO(oshima): Try adjusting different parameters for large files to
# reduce the processing time.
function random_huffman_table_trial {
  local old_size=$(stat -c%s $file)
  local trials_count=$DEFAULT_RANDOM_TRIALS

  if [ $old_size -gt $LARGE_FILE_THRESHOLD ]; then
    trials_count=$LARGE_RANDOM_TRIALS
  for i in $(seq 1 $trials_count); do
    pngout -q -k1 -ks -s0 -r $file

  local new_size=$(stat -c%s $file)
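  # If this batch of random trials shrank the file, run another batch.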
  if [ $new_size -lt $old_size ]; then
    random_huffman_table_trial $file
# Usage: final_compression <file>
# Further compress using optipng and advdef.
# TODO(oshima): Experiment with 256.
function final_compression {
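  # At optimization level 2, additionally sweep optipng's zlib window size
  # (-zw) from 32k down to 512 along with its compression level, memory
  # level, strategy and filter ranges.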
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    for i in 32k 16k 8k 4k 2k 1k 512; do
      optipng -q -nb -nc -zw$i -zc1-9 -zm1-9 -zs0-3 -f0-5 $file
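  # Re-deflate the zlib stream with advdef at compression levels 1 through 4
  # (increasing effort).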
  for i in $(seq 1 4); do
    advdef -q -z -$i $file
# Usage: get_color_type <file>
# Returns the color type name of the png file.
# See http://en.wikipedia.org/wiki/Portable_Network_Graphics#Color_depth
# for details about the color type codes.
function get_color_type {
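  # file(1) prints something like "foo.png: PNG image data, 32 x 32,
  # 8-bit/color RGBA, non-interlaced"; take the third comma-separated field
  # and return its second word (e.g. "RGBA").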
  echo $(file $file | awk -F, '{print $3}' | awk '{print $2}')
# Usage: optimize_size <file>
# Performs png file optimization.
function optimize_size {
  advdef -q -z -4 $file
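  # Probe the color type: force grayscale (-c0) and grayscale+alpha (-c4)
  # output into a temporary file and pick the per-color-type pipeline based
  # on whether the temporary file was produced.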
  pngout -q -s4 -c0 -force $file $file.tmp.png
  if [ -f $file.tmp.png ]; then
    process_grayscale $file
    process_grayscale_alpha $file

  pngout -q -s4 -c4 -force $file $file.tmp.png
  if [ -f $file.tmp.png ]; then
    process_grayscale_alpha $file

  local old_color_type=$(get_color_type $file)
  optipng -q -zc9 -zm8 -zs0-3 -f0-5 $file -out $file.tmp.png
  local new_color_type=$(get_color_type $file.tmp.png)
  # optipng may corrupt a png file when reducing the color type
  # to grayscale/grayscale+alpha. Just skip such cases until
  # the bug is fixed. See crbug.com/174505, crbug.com/174084.
  # The issue is reported in
  # https://sourceforge.net/tracker/?func=detail&aid=3603630&group_id=151404&atid=780913
  if [[ $old_color_type == "RGBA" && $new_color_type =~ gray.* ]] ; then
    info -n "[skip opting]"
  else
    mv $file.tmp.png $file
  fi
  pngout -q -k1 -s1 $file

  # TODO(oshima): Experiment with strategy 1.
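  # pngout's -s<n> selects the compression strategy; the -s$i sweep below is
  # only run at optimization level 2.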
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    pngout -q -k1 -ks -s$i $file
  pngout -q -k1 -ks -s1 $file
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    random_huffman_table_trial $file

  final_compression $file
# Usage: process_file <file>
function process_file {
  local name=$(basename $file)
  # -rem alla removes all ancillary chunks except for tRNS.
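  # pngcrush writes its output into $TMP_DIR (-d), -brute tries its full set
  # of filter/compression combinations, and -reduce performs lossless color
  # type and bit depth reduction.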
  pngcrush -d $TMP_DIR -brute -reduce -rem alla $file > /dev/null
  if [ -f $TMP_DIR/$name -a $OPTIMIZE_LEVEL != 0 ]; then
    optimize_size $TMP_DIR/$name
# Usage: optimize_file <file>
function optimize_file {
  if $using_cygwin ; then
    file=$(cygpath -w $file)

  local name=$(basename $file)
  local old=$(stat -c%s $file)
  local tmp_file=$TMP_DIR/$name
  if [ ! -e $tmp_file ] ; then
    let CORRUPTED_FILE+=1
    echo "The png file ($file) may be corrupted. Skipping."
  local new=$(stat -c%s $tmp_file)
  let diff=$old-$new
  let percent=($diff*100)/$old
  if [ $new -lt $old ]; then
    echo -ne "$file : $old => $new ($diff bytes : $percent %)\n"
    mv "$tmp_file" "$file"
    let TOTAL_OLD_BYTES+=$old
    let TOTAL_NEW_BYTES+=$new
    let PROCESSED_FILE+=1
  if [ $OPTIMIZE_LEVEL == 0 ]; then
    info -ne "$file : skipped\r"
function optimize_dir {
  if $using_cygwin ; then
    dir=$(cygpath -w $dir)

  for f in $(find $dir -name "*.png"); do
function install_if_not_installed {
  which $program > /dev/null 2>&1
  if [ "$?" != "0" ]; then
    if $using_cygwin ; then
      echo "Couldn't find $program. " \
           "Please run cygwin's setup.exe and install the $package package."
      read -p "Couldn't find $program. Do you want to install? (y/n)"
      [ "$REPLY" == "y" ] && sudo apt-get install $package
      [ "$REPLY" == "y" ] || exit
function fail_if_not_installed {
  which $program > /dev/null 2>&1
  echo "Couldn't find $program. Please download and install it from $url ."
  local program=$(basename $0)
"Usage: $program [options] <dir> ...

$program is a utility to reduce the size of png files by removing
unnecessary chunks and compressing the image.

  -o<optimize_level>  Specify the optimization level (default is 1):
      0  Just run pngcrush. It removes unnecessary chunks and performs basic
         optimization on the encoded data.
      1  Optimize png files using pngout/optipng and advdef. This can reduce
         the size by an additional 5~30%. This is the default level.
      2  Aggressively optimize the size of png files. This may produce an
         additional 1%~5% reduction. Warning: this is *VERY* slow and can
         take hours to process all files.
  -r<revision>  Process only png files changed since this revision. The <dir>
                arguments can be used to narrow down the files under specific
                directories.
  -v  Show the optimization process for each file.
  -h  Print this help text."
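
# For example, running this script from the src directory with
# "-o2 -v chrome/browser/resources" aggressively optimizes the png files
# under chrome/browser/resources and prints the optimization process for
# each file.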
if [ ! -e ../.gclient ]; then
  echo "$0 must be run in the src directory"

if [ "$(expr substr $(uname -s) 1 6)" == "CYGWIN" ]; then
while getopts o:r:h:v opts
  COMMIT=$(git svn find-rev r$OPTARG | tail -1) || exit
  if [ -z "$COMMIT" ] ; then
    echo "Revision $OPTARG not found"
  if [[ ! "$OPTARG" =~ [012] ]] ; then
  OPTIMIZE_LEVEL=$OPTARG
# Remove options from argument list.
shift $(($OPTIND -1))
# Make sure we have all necessary commands installed.
install_if_not_installed pngcrush pngcrush
if [ $OPTIMIZE_LEVEL -ge 1 ]; then
  install_if_not_installed optipng optipng

  if $using_cygwin ; then
    fail_if_not_installed advdef "http://advancemame.sourceforge.net/comp-readme.html"
  else
    install_if_not_installed advdef advancecomp
  fi

  if $using_cygwin ; then
    pngout_url="http://www.advsys.net/ken/utils.htm"
  else
    pngout_url="http://www.jonof.id.au/kenutils"
  fi
  fail_if_not_installed pngout $pngout_url
# Create tmp directory for crushed png files.
if $using_cygwin ; then
  TMP_DIR=$(cygpath -w $TMP_DIR)

# Make sure we clean up the temp dir on exit.
trap "rm -rf $TMP_DIR" EXIT
# If no directories are specified, optimize all directories.
set ${DIRS:=$ALL_DIRS}

info "Optimize level=$OPTIMIZE_LEVEL"
if [ -n "$COMMIT" ] ; then
  ALL_FILES=$(git diff --name-only $COMMIT HEAD $DIRS | grep "png$")
  ALL_FILES_LIST=( $ALL_FILES )
  echo "Processing ${#ALL_FILES_LIST[*]} files"
  for f in $ALL_FILES; do
      echo "Skipping deleted file: $f"
    info "Optimizing png files in $d"
  elif [ -f $d ] ; then
    echo "Not a file or directory: $d"
if [ $PROCESSED_FILE == 0 ]; then
  echo "Did not find any files (out of $TOTAL_FILE files)" \
       "that could be optimized" \
       "in $(date -u -d @$SECONDS +%T)s"
  let diff=$TOTAL_OLD_BYTES-$TOTAL_NEW_BYTES
  let percent=$diff*100/$TOTAL_OLD_BYTES
  echo "Processed $PROCESSED_FILE files (out of $TOTAL_FILE files)" \
       "in $(date -u -d @$SECONDS +%T)s"
  echo "Result : $TOTAL_OLD_BYTES => $TOTAL_NEW_BYTES bytes" \
       "($diff bytes : $percent %)"
if [ $CORRUPTED_FILE != 0 ]; then
  echo "Warning: corrupted files found: $CORRUPTED_FILE"
  echo "Please contact the author of the CL that landed the corrupted png files."