# run as puppet-dashboard
if [[ -n "${PPG_DEBUG}" ]]; then
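  # assumption: the PPG_DEBUG flag turns on shell tracing
  set -x
fi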
PPG_EXEC_PATH=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")
# Assume reports are around 400KB each uncompressed
# (this is the full report size; "lazy" reports are small).
# Each batch of 3000 will take 1.2GB of disk space,
# and that 1.2GB will remain in use until GC'd 2 hours later.
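# assumed definition, implied by the sizing comment above:
# 3000 reports * ~400KB ≈ 1.2GB per batch
max_per_batch=3000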
reportsdir=/var/lib/ppg/reports-import
reportstmpdir=$(mktemp -d --tmpdir=/var/tmp ppg-reports-to-dashboard.XXXXXXX)
# find files, skipping those older than 2 days, oldest first,
# and take at most ${max_per_batch} of them per run
for fpath in $(find ${reportsdir} -type f -name '*.yaml.gz' -mtime -2 -printf '%T@ %p\n' |
               sort -n -k 1 | cut -d' ' -f2 | head -n ${max_per_batch}); do
  # strip out reportsdir, split out hostname from filename (which is a timestamp)
  fpath=${fpath:$(( ${#reportsdir} + 1 ))}
  fname=$(basename "$fpath")
  hostname=$(dirname "$fpath")
  # we use special chars so quoting is important
  mv "${reportsdir}/$fpath" "$reportstmpdir/${hostname}_${fname}"
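  # assumption: mark that at least one report was staged, so the import step
  # below only runs when there is work to do
  seenany=yes
done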
if [ "${seenany}" = 'yes' ]; then
  gzip -q -d $reportstmpdir/*.gz
  pushd /usr/share/puppet-dashboard >/dev/null
  rake RAILS_ENV=production reports:import REPORT_DIR=${reportstmpdir} |
    grep -v 'ETA:.*Importing:'
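  # assumption: leave the dashboard directory and close the guard
  popd >/dev/null
fi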
# the rake/import process needs the files to stick around for a while
# and does not delete them when done, so this is fugly
find /var/tmp -type d -name 'ppg-reports-to-dashboard.*' \
     -user puppet-dashboard -mmin +120 -print0 | \
  xargs -0 --no-run-if-empty -L 100 rm -fr
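# Example cron entry (hypothetical path and schedule), installed for the
# puppet-dashboard user so the temp dirs it creates match the -user test in
# the cleanup above:
#   */15 * * * * /usr/local/bin/ppg-reports-to-dashboard >/dev/null 2>&1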