# Removed un-used tokens
# [luakit.git] / build-utils / gperf.sh
# blob f407682dc26f3840272a236b4b1be9d8f039ded7
#!/bin/sh
#
# Copyright (C) 2010 Mason Larobina <mason.larobina@gmail.com>
# Copyright (C) 2008 Pierre Habouzit <madcoder@debian.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
# die MESSAGE...
# Print all arguments to stderr and abort the script with status 2.
die() {
    echo "$@" 1>&2
    exit 2
}
# Print the shared "autogenerated" banner comment placed at the top of
# every generated file, followed by a blank separator line.
do_hdr() {
    cat <<EOF
/* This file is autogenerated by $(basename "$0") */

EOF
}
# Parse leading command-line options.  "-o FILE" and "-t TYPE" pairs are
# consumed from the argument list; parsing stops at the first unknown
# argument, or once two or fewer arguments remain.
out=
type_t=

while [ "$#" -gt 2 ]; do
    case "$1" in
        -o) shift; out=$1; shift ;;
        -t) shift; type_t=$1; shift ;;
        *)  break ;;
    esac
done
# Generate the C header (tokenize.h) on stdout from the token list read
# on stdin: an enum with one L_TK_* member per token plus the
# l_tokenize() prototype.  The backquoted tr|sed pipeline inside the
# heredoc inherits this function's stdin, which is how the tokens flow
# through.  Lines starting with '/' (comments) are skipped by the sed
# pattern ^[^/].
do_h() {
    cat <<EOF
`do_hdr`
#ifndef LUAKIT_COMMON_TOKENIZE_H
#define LUAKIT_COMMON_TOKENIZE_H

typedef enum luakit_token_t {
    L_TK_UNKNOWN,
`tr '[:lower:]' '[:upper:]' | sed -e "s/^[^/].*/    L_TK_&,/"`
} luakit_token_t;

__attribute__((pure)) enum luakit_token_t l_tokenize(const char *s, int len);
#endif
EOF
}
# Translate the token list on stdin into gperf keyword lines of the
# form "<token>, L_TK_<TOKEN>".  Blank lines are skipped.
do_tokens() {
    # read -r: do not mangle backslashes in token names
    while read -r tok; do
        case "$tok" in
            "") continue ;;
            *)  echo "$tok, L_TK_$(echo "$tok" | tr '[:lower:]' '[:upper:]')" ;;
        esac
    done
}
# Generate the C source (tokenize.c) on stdout: feed a gperf input file
# built from the token list on stdin to gperf, producing a perfect-hash
# lookup, then strip inline attributes that upset some compilers.
do_c() {
    # command -v is more robust than the old `which` + `test $? = 1`
    # check: which can fail with codes other than 1 (e.g. 127 when
    # which itself is missing), which the old check silently ignored.
    if ! command -v gperf >/dev/null 2>&1; then
        echo "gperf not found. You need to install gperf." >&2
        exit 1
    fi

    gperf -l -t -C -F",0" \
        --language=ANSI-C -Nl_tokenize_aux <<EOF \
        | sed -e '/__gnu_inline__/d;s/\<\(__\|\)inline\>//g'
%{
`do_hdr`
#include <string.h>
#include "common/tokenize.h"

static const struct tok *l_tokenize_aux(const char *str, unsigned int len);
%}
struct tok { const char *name; int val; };
%%
`do_tokens`
%%

luakit_token_t l_tokenize(const char *s, int len)
{
    if (len < 0)
        len = (int)strlen(s);

    if (len) {
        const struct tok *res = l_tokenize_aux(s, len);
        return res ? res->val : L_TK_UNKNOWN;
    } else {
        return L_TK_UNKNOWN;
    }
}
EOF
}
# extract_tokens FILE
# List the tokens declared in FILE: for each line starting with "### ",
# print the second space-separated field (the token name).
extract_tokens() {
    grep '^### ' "$1" | cut -d ' ' -f 2
}
# Entry point: gperf.sh [-o FILE] [-t TYPE] TOKENS_FILE TARGET
# Regenerates TARGET (.h or .c) from the token list in TOKENS_FILE and
# marks it read-only so nobody edits the generated file by hand.
TOKENS_FILE="$1"
TARGET="$2"

# Remove a possibly half-written target if we exit early.  Single
# quotes defer expansion to trap-fire time and keep the name quoted,
# so targets containing spaces are removed correctly (the old
# double-quoted form expanded TARGET unquoted at trap-set time).
trap 'rm -f "${TARGET}"' 0

rm -f "${TARGET}"
case "${TARGET}" in
    *.h) do_h > "${TARGET}" < "${TOKENS_FILE}" ;;
    *.c) do_c > "${TARGET}" < "${TOKENS_FILE}" ;;
    *)   die "you must ask for the 'h' or 'c' generation" ;;
esac
chmod -w "${TARGET}"

# Success: disarm the cleanup trap so the fresh target survives.
trap - 0
exit 0