 3  stats = eval(file("var/crawl.stats").read())

 6  def print_url(title, entry):

10      print "URL: %s" % entry["orig_url"]
11      print "Referer: %s" % entry["referer_url"]
12      print "Duration: %0.2f sec (Avg. %0.2f sec of %d tries)" % \
13          (entry["last_duration"], entry["avg_duration"],

15      print "Size: %0.2f KBytes (Avg. %0.2f KBytes of %d tries)" % \
16          (entry["last_content_size"]/1024.0,
17           entry["avg_content_size"]/1024.0,

21  print "Total:", stats["_TOTAL_"]

24  print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
25  print "+ SLOWEST PAGES +"
26  print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"

29  for num, (key, val) in enumerate(sorted(stats.items(),
30          key=lambda x: "avg_duration" in x[1] \
31              and x[1]["avg_duration"]
32              or 0, reverse=True)[:10]):

36      print_url("%02d Slowest" % num, val)

39  print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"
40  print "+ LARGEST PAGES +"
41  print "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"

44  for num, (key, val) in enumerate(sorted(stats.items(),
45          key=lambda x: "avg_content_size" in x[1] \
46              and x[1]["avg_content_size"]
47              or 0, reverse=True)[:10]):

51      print_url("%02d Largest" % num, val)
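The sort key on listing lines 30-32 (and 45-47 for the size ranking) uses the pre-Python 2.5 and/or conditional idiom to fall back to 0 when an entry has no averaged value yet. Below is a minimal standalone sketch of that ranking step, assuming the stats file evaluates to a dict mapping each URL to a dict of per-URL statistics; the sample URLs and numbers are illustrative only, and the "_TOTAL_" counter printed on line 21 is left out of the sample.

# Sketch of the ranking step with illustrative sample data (not real crawl output).
sample_stats = {
    "http://example.com/a": {"avg_duration": 2.5, "avg_content_size": 10240},
    "http://example.com/b": {"avg_duration": 0.4, "avg_content_size": 512000},
    "http://example.com/c": {},   # an entry may lack a stat, hence the fallback to 0
}

# dict.get() expresses the same fallback as the listing's
# '"avg_duration" in x[1] and x[1]["avg_duration"] or 0' construction.
slowest = sorted(sample_stats.items(),
                 key=lambda x: x[1].get("avg_duration", 0),
                 reverse=True)[:10]

for num, (key, val) in enumerate(slowest):
    print "%02d Slowest: %s (%.2f sec)" % (num, key, val.get("avg_duration", 0))

The two forms rank identically, since a missing key and a zero value both sort to the bottom; get() simply avoids the double lookup and reads more clearly than the and/or construction.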