Drop main() prototype. Syncs with NetBSD-8
[minix.git] / usr.bin / make / unit-tests / Makefile
blob 10503b93dc9097b7a3c0f36c50b2cf7a35ca544a

# $NetBSD: Makefile,v 1.52 2015/05/05 21:51:09 sjg Exp $
#
# Unit tests for make(1)
# The main targets are:
#
# all: run all the tests
# test: run 'all', and compare to expected results
# accept: move generated output to expected results
#
# Adding a test case.
# Each feature should get its own set of tests in its own suitably
# named makefile (*.mk), with its own set of expected results (*.exp),
# and it should be added to the TESTNAMES list.
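# (For example, a hypothetical test named "newfeature" would consist of
# newfeature.mk and its expected output newfeature.exp in this directory,
# plus a "newfeature" entry in the TESTNAMES list below.)
#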

.MAIN: all

UNIT_TESTS:= ${.PARSEDIR}
.PATH: ${UNIT_TESTS}

# Each test is in a sub-makefile.
# Keep the list sorted.
TESTNAMES= \
	comment \
	cond1 \
	cond2 \
	error \
	export \
	export-all \
	export-env \
	doterror \
	dotwait \
	forloop \
	forsubst \
	hash \
	misc \
	moderrs \
	modmatch \
	modmisc \
	modorder \
	modts \
	modword \
	order \
	posix \
	qequals \
	sunshcmd \
	sysv \
	ternary \
	unexport \
	unexport-env \
	varcmd \
	varmisc \
	varshell

# these tests were broken by reverting POSIX changes
STRICT_POSIX_TESTS = \
	escape \
	impsrc \
	phony-end \
	posix1 \
	suffixes

# Override make flags for certain tests
flags.doterror=
flags.order=-j1
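# (Each flags.<testname> variable replaces the default -k for that test;
# e.g. a hypothetical "flags.newfeature= -dv" would run newfeature.mk with
# -dv instead of -k. The name "newfeature" is only an illustration.)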

OUTFILES= ${TESTNAMES:S/$/.out/}

all: ${OUTFILES}

CLEANFILES += *.rawout *.out *.status *.tmp *.core
CLEANFILES += obj*.[och] lib*.a # posix1.mk
CLEANFILES += issue* .[ab]* # suffixes.mk
CLEANRECURSIVE += dir dummy # posix1.mk

clean:
	rm -f ${CLEANFILES}
.if !empty(CLEANRECURSIVE)
	rm -rf ${CLEANRECURSIVE}
.endif

TEST_MAKE?= ${.MAKE}
TOOL_SED?= sed

# ensure consistent results from sort(1)
LC_ALL= C
LANG= C
.export LANG LC_ALL

# the tests are actually done with sub-makes.
.SUFFIXES: .mk .rawout .out
.mk.rawout:
	@echo ${TEST_MAKE} ${flags.${.TARGET:R}:U-k} -f ${.IMPSRC}
	-@cd ${.OBJDIR} && \
	{ ${TEST_MAKE} ${flags.${.TARGET:R}:U-k} -f ${.IMPSRC} \
	  2>&1 ; echo $$? >${.TARGET:R}.status ; } > ${.TARGET}.tmp
	@mv ${.TARGET}.tmp ${.TARGET}
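# (So for an existing test such as cond1, the rule above roughly runs
#     ${TEST_MAKE} -k -f cond1.mk >cond1.rawout 2>&1
# and records the sub-make's exit status in cond1.status.)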

# We always pretend .MAKE was called 'make'
# and strip ${.CURDIR}/ from the output
# and replace anything after 'stopped in' with unit-tests
# so the results can be compared.
.rawout.out:
	@echo postprocess ${.TARGET}
	@${TOOL_SED} -e 's,^${TEST_MAKE:T:C/\./\\\./g}[][0-9]*:,make:,' \
	  -e 's,${TEST_MAKE:C/\./\\\./g},make,' \
	  -e '/stopped/s, /.*, unit-tests,' \
	  -e 's,${.CURDIR:C/\./\\\./g}/,,g' \
	  -e 's,${UNIT_TESTS:C/\./\\\./g}/,,g' \
	  < ${.IMPSRC} > ${.TARGET}.tmp
	@echo "exit status `cat ${.TARGET:R}.status`" >> ${.TARGET}.tmp
	@mv ${.TARGET}.tmp ${.TARGET}
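# (Illustration only, assuming TEST_MAKE resolves to a binary called "nbmake":
# a raw line like "nbmake[1]: stopped in /usr/src/usr.bin/make/unit-tests"
# comes out of the sed above as "make: stopped in unit-tests".)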

# Compare all output files
test: ${OUTFILES} .PHONY
	@failed= ; \
	for test in ${TESTNAMES}; do \
	    diff -u ${UNIT_TESTS}/$${test}.exp $${test}.out \
	    || failed="$${failed}$${failed:+ }$${test}" ; \
	done ; \
	if [ -n "$${failed}" ]; then \
	    echo "Failed tests: $${failed}" ; false ; \
	else \
	    echo "All tests passed" ; \
	fi

accept:
	@for test in ${TESTNAMES}; do \
	    cmp -s ${UNIT_TESTS}/$${test}.exp $${test}.out \
	    || { echo "Replacing $${test}.exp" ; \
	         cp $${test}.out ${UNIT_TESTS}/$${test}.exp ; } \
	done
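
# (Typical workflow sketch: "make test" runs everything and diffs against the
# *.exp files; once the new output has been reviewed, "make accept" copies any
# changed *.out file over its corresponding *.exp file.)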

.if exists(${TEST_MAKE})
${TESTNAMES:S/$/.rawout/}: ${TEST_MAKE}
.endif
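# (The extra dependency above just forces every test to be re-run whenever
# the ${TEST_MAKE} binary itself has been rebuilt.)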

.-include <bsd.obj.mk>