#!/bin/ksh
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License, Version 1.0 only
# (the "License"). You may not use this file except in compliance
# with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright (c) 1998 by Sun Microsystems, Inc.
# All rights reserved.
#
#pragma ident	"%Z%%M%	%I%	%E%	SMI"
#
# gsscred_db clean up script
#
# This file is used to remove duplicate entries from
# the gsscred_db file. It is activated as a root cron
# job once a day. It only performs cleanup when
# the gsscred_db file has changed since last operation.
# Paths are overridable from the environment (defaults preserve the
# historical locations); this makes the cleanup logic testable without
# touching /etc/gss.
FILE_TO_CLEAN=${FILE_TO_CLEAN:-/etc/gss/gsscred_db}
CLEAN_TIME=${CLEAN_TIME:-/etc/gss/.gsscred_clean}
TMP_FILE=${TMP_FILE:-/etc/gss/gsscred_clean$$}

# Remove the work file on normal exit and on HUP/INT/QUIT/PIPE/TERM.
# Single quotes defer expansion so the trap always removes the current
# $TMP_FILE value.
trap 'rm -f "$TMP_FILE"; exit' 0 1 2 3 13 15

#
# clean_gsscred_db - drop stale duplicate entries from the gsscred_db file.
#
# Each line of the db has the form:
#	name uid comment
# Additions append to the file, so for a given name the LAST entry is
# the current one.  Runs only when the db is non-empty and has changed
# since the timestamp file was last updated.
#
clean_gsscred_db()
{
	if [ -s "$FILE_TO_CLEAN" ] && [ "$FILE_TO_CLEAN" -nt "$CLEAN_TIME" ]
	then
		#
		# cat -n prefixes each line with its line number so the
		# original order survives the sort.  Sort by name (field 2)
		# and then by line number in reverse, so the newest entry
		# for each name comes first; awk then keeps only the first
		# line seen for each name, and cut strips the line numbers.
		#
		cat -n "$FILE_TO_CLEAN" | sort -k 2,2 -k 1,1nr 2> /dev/null \
			| cut -f2- | \
			awk '(NR > 1 && $1 != key) || NR == 1 {
				key = $1;
				print $0;
			}' > "$TMP_FILE"

		if [ $? -eq 0 ] && mv "$TMP_FILE" "$FILE_TO_CLEAN"; then
			# Record when this cleanup ran by giving the stamp
			# file the same mtime as the freshly sorted db.
			touch -r "$FILE_TO_CLEAN" "$CLEAN_TIME"
		else
			rm -f "$TMP_FILE"
		fi
	fi
}

clean_gsscred_db