1 #!/usr/bin/env bash
2 # shellcheck disable=SC2016
3 # shellcheck disable=SC2317
4 # shellcheck disable=SC2192
6 # Tests lib.fileset
7 # Run:
8 # [nixpkgs]$ lib/fileset/tests.sh
9 # or:
10 # [nixpkgs]$ nix-build lib/tests/release.nix
12 set -euo pipefail
13 shopt -s inherit_errexit dotglob
15 die() {
16 # The second to last entry contains the line number of the top-level caller
17 lineIndex=$(( ${#BASH_LINENO[@]} - 2 ))
18 echo >&2 -e "test case at ${BASH_SOURCE[0]}:${BASH_LINENO[$lineIndex]} failed:" "$@"
19 exit 1
22 if test -n "${TEST_LIB:-}"; then
23 NIX_PATH=nixpkgs="$(dirname "$TEST_LIB")"
24 else
25 NIX_PATH=nixpkgs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.."; pwd)"
27 export NIX_PATH
29 tmp="$(mktemp -d)"
30 clean_up() {
31 rm -rf "$tmp"
33 trap clean_up EXIT SIGINT SIGTERM
34 work="$tmp/work"
35 mkdir "$work"
36 cd "$work"
38 # Crudely unquotes a JSON string by just taking everything between the first and the second quote.
40 # We're only using this for resulting /nix/store paths, which can't contain " anyway,
40 # nor can they contain any other characters that would need to be escaped specially in JSON
41 # This way we don't need to add a dependency on e.g. jq
42 crudeUnquoteJSON() {
43 cut -d \" -f2
44 }
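# A rough sketch of what this helper does (illustrative only, not executed;
# the store path below is made up):
#   echo '"/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-source"' | crudeUnquoteJSON
#   # prints: /nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-source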
46 prefixExpression='
47 let
48 lib = import <nixpkgs/lib>;
49 internal = import <nixpkgs/lib/fileset/internal.nix> {
50 inherit lib;
51 };
52 in
53 with lib;
54 with internal;
55 with lib.fileset;
56 '
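# Illustrative only (not executed): the helpers below prepend $prefixExpression to every
# expression under test, so a call like `expectEqual 'toList ./.' '[ ]'` effectively runs
#   nix-instantiate --eval --strict --show-trace --expr "$prefixExpression (toList ./.)"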
58 # Check that two Nix expressions successfully evaluate to the same value.
59 # The expressions have `lib.fileset` in scope.
60 # Usage: expectEqual NIX NIX
61 expectEqual() {
62 local actualExpr=$1
63 local expectedExpr=$2
64 if actualResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/actualStderr \
65 --expr "$prefixExpression ($actualExpr)"); then
66 actualExitCode=$?
67 else
68 actualExitCode=$?
70 actualStderr=$(< "$tmp"/actualStderr)
72 if expectedResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/expectedStderr \
73 --expr "$prefixExpression ($expectedExpr)"); then
74 expectedExitCode=$?
75 else
76 expectedExitCode=$?
78 expectedStderr=$(< "$tmp"/expectedStderr)
80 if [[ "$actualExitCode" != "$expectedExitCode" ]]; then
81 echo "$actualStderr" >&2
82 echo "$actualResult" >&2
83 die "$actualExpr should have exited with $expectedExitCode, but it exited with $actualExitCode"
86 if [[ "$actualResult" != "$expectedResult" ]]; then
87 die "$actualExpr should have evaluated to $expectedExpr:\n$expectedResult\n\nbut it evaluated to\n$actualResult"
90 if [[ "$actualStderr" != "$expectedStderr" ]]; then
91 die "$actualExpr should have had this on stderr:\n$expectedStderr\n\nbut it was\n$actualStderr"
92 fi
93 }
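# Illustrative usage (not executed): both expressions are evaluated with the prefix above
# and must agree on exit code, output and stderr, e.g.
#   expectEqual 'toList _emptyWithoutBase' '[ ]'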
95 # Check that a Nix expression evaluates successfully to a store path and returns it (without quotes).
96 # The expression has `lib.fileset` in scope.
97 # Usage: expectStorePath NIX
98 expectStorePath() {
99 local expr=$1
100 if ! result=$(nix-instantiate --eval --strict --json --read-write-mode --show-trace 2>"$tmp"/stderr \
101 --expr "$prefixExpression ($expr)"); then
102 cat "$tmp/stderr" >&2
103 die "$expr failed to evaluate, but it was expected to succeed"
105 # This is safe because we expect to get back a store path as a string
106 crudeUnquoteJSON <<< "$result"
107 }
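# Illustrative usage (not executed): the unquoted store path can then be inspected with
# regular shell commands, e.g.
#   storePath=$(expectStorePath 'toSource { root = ./.; fileset = ./.; }')
#   [[ -e "$storePath" ]] || die "store path missing"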
109 # Check that a Nix expression fails to evaluate (strictly, in read-write mode)
110 # and check the received stderr against a regex.
111 # The expression has `lib.fileset` in scope.
112 # Usage: expectFailure NIX REGEX
113 expectFailure() {
114 local expr=$1
115 local expectedErrorRegex=$2
116 if result=$(nix-instantiate --eval --strict --read-write-mode --show-trace 2>"$tmp/stderr" \
117 --expr "$prefixExpression $expr"); then
118 die "$expr evaluated successfully to $result, but it was expected to fail"
120 stderr=$(<"$tmp/stderr")
121 if [[ ! "$stderr" =~ $expectedErrorRegex ]]; then
122 die "$expr should have errored with this regex pattern:\n\n$expectedErrorRegex\n\nbut this was the actual error:\n\n$stderr"
123 fi
124 }
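# Illustrative usage (not executed): the second argument is an extended regex that is
# matched against the stderr, e.g.
#   expectFailure 'toSource { root = ./.; fileset = 10; }' \
#     'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'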
126 # Check that the traces of a Nix expression are as expected when evaluated.
127 # The expression has `lib.fileset` in scope.
128 # Usage: expectTrace NIX STR
129 expectTrace() {
130 local expr=$1
131 local expectedTrace=$2
133 nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTrace \
134 --expr "$prefixExpression trace ($expr)" || true
136 actualTrace=$(sed -n 's/^trace: //p' "$tmp/stderrTrace")
138 nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTraceVal \
139 --expr "$prefixExpression traceVal ($expr)" || true
141 actualTraceVal=$(sed -n 's/^trace: //p' "$tmp/stderrTraceVal")
143 # Test that traceVal returns the same trace as trace
144 if [[ "$actualTrace" != "$actualTraceVal" ]]; then
145 cat "$tmp"/stderrTrace >&2
146 die "$expr traced this for lib.fileset.trace:\n\n$actualTrace\n\nand something different for lib.fileset.traceVal:\n\n$actualTraceVal"
149 if [[ "$actualTrace" != "$expectedTrace" ]]; then
150 cat "$tmp"/stderrTrace >&2
151 die "$expr should have traced this:\n\n$expectedTrace\n\nbut this was actually traced:\n\n$actualTrace"
152 fi
153 }
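# Illustrative usage (not executed): the expected trace is passed as a newline-separated
# string, e.g.
#   expectTrace './.' '(empty)'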
155 # We conditionally use inotifywait in withFileMonitor.
156 # Check early whether it's available
157 # TODO: Darwin support, though not crucial since we have Linux CI
158 if type inotifywait 2>/dev/null >/dev/null; then
159 canMonitor=1
160 else
161 echo "Warning: Cannot check for paths not getting read since the inotifywait command (from the inotify-tools package) is not available" >&2
162 canMonitor=
165 # Run a function while monitoring that it doesn't read certain paths
166 # Usage: withFileMonitor FUNNAME PATH...
167 # - FUNNAME should be a bash function that:
168 # - Performs some operation that should not read some paths
169 # - Deletes the paths it shouldn't read, without triggering any open events
170 # - PATH... are the paths that should not get read
172 # This function outputs the same as FUNNAME
173 withFileMonitor() {
174 local funName=$1
175 shift
177 # If we can't monitor files or have none to monitor, just run the function directly
178 if [[ -z "$canMonitor" ]] || (( "$#" == 0 )); then
179 "$funName"
180 else
182 # Use a subshell to start the coprocess in and use a trap to kill it when exiting the subshell
184 # Assigned by coproc, makes shellcheck happy
185 local watcher watcher_PID
187 # Start inotifywait in the background to monitor all excluded paths
188 coproc watcher {
189 # inotifywait outputs a string on stderr when ready
190 # Redirect it to stdout so we can access it from the coproc's stdout fd
191 # exec so that the coprocess is inotifywait itself, making the kill below work correctly
192 # See below why we listen to both open and delete_self events
193 exec inotifywait --format='%e %w' --event open,delete_self --monitor "$@" 2>&1
196 # This will trigger when this subshell exits, no matter if successful or not
197 # After exiting the subshell, the parent shell will continue executing
198 trap 'kill "${watcher_PID}"' exit
200 # Synchronously wait until inotifywait is ready
201 while read -r -u "${watcher[0]}" line && [[ "$line" != "Watches established." ]]; do
203 done
205 # Call the function that should not read the given paths and delete them afterwards
206 "$funName"
208 # Get the first event
209 read -r -u "${watcher[0]}" event file
211 # With funName potentially reading files first before deleting them,
212 # there are only these two possible event timelines:
213 # - open*, ..., open*, delete_self, ..., delete_self: If some excluded paths were read
214 # - delete_self, ..., delete_self: If no excluded paths were read
215 # So by looking at the first event we can figure out which one it is!
216 # This also means we don't have to wait to collect all events.
217 case "$event" in
218 OPEN*)
219 die "$funName opened excluded file $file when it shouldn't have"
220 ;;
221 DELETE_SELF)
222 # Expected events
223 ;;
224 *)
225 die "During $funName, unexpected event type '$event' on file $file that should be excluded"
226 ;;
227 esac
228 )
229 fi
230 }
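# Illustrative usage (not executed; "includedFile" and "excludedFile" are made-up names):
# define a function that does the work and then deletes the paths it must not read, and
# pass its name together with those paths:
#   run() {
#     expectStorePath 'toSource { root = ./.; fileset = ./includedFile; }'
#     rm excludedFile
#   }
#   withFileMonitor run excludedFile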
233 # Create the tree structure declared in the tree variable, usage:
235 # tree=(
236 #   [a/b]=   # Declare that file a/b should exist
237 #   [c/a]=   # Declare that file c/a should exist
238 #   [c/d/]=  # Declare that directory c/d/ should exist
239 # )
240 # createTree
241 declare -A tree
242 createTree() {
243 # Track which paths need to be created
244 local -a dirsToCreate=()
245 local -a filesToCreate=()
246 for p in "${!tree[@]}"; do
247 # If keys end with a `/` we treat them as directories, otherwise files
248 if [[ "$p" =~ /$ ]]; then
249 dirsToCreate+=("$p")
250 else
251 filesToCreate+=("$p")
253 done
255 # Create all the necessary paths.
256 # This is done with only a fixed number of processes,
257 # in order to not be too slow
258 # Though this does mean we're a bit limited in how many files can be created
259 if (( ${#dirsToCreate[@]} != 0 )); then
260 mkdir -p "${dirsToCreate[@]}"
262 if (( ${#filesToCreate[@]} != 0 )); then
263 readarray -d '' -t parentsToCreate < <(dirname -z "${filesToCreate[@]}")
264 mkdir -p "${parentsToCreate[@]}"
265 touch "${filesToCreate[@]}"
266 fi
267 }
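# Illustrative usage (not executed; the paths are made up):
#   tree=(
#     [src/main.c]=  # a file to create
#     [build/]=      # an (empty) directory to create
#   )
#   createTree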
269 # Check whether a file set includes/excludes declared paths as expected, usage:
271 # tree=(
272 #   [a/b]=1  # Declare that file a/b should exist and expect it to be included in the store path
273 #   [c/a]=   # Declare that file c/a should exist and expect it to be excluded in the store path
274 #   [c/d/]=  # Declare that directory c/d/ should exist and expect it to be excluded in the store path
275 # )
276 # checkFileset './a' # Pass the fileset as the argument
277 checkFileset() {
278 local fileset=$1
280 # Create the tree
281 createTree
283 # Process the tree into separate arrays for included paths, included files, excluded paths and excluded files.
284 local -a included=()
285 local -a includedFiles=()
286 local -a excluded=()
287 local -a excludedFiles=()
288 for p in "${!tree[@]}"; do
289 case "${tree[$p]}" in
291 included+=("$p")
292 # If keys end with a `/` we treat them as directories, otherwise files
293 if [[ ! "$p" =~ /$ ]]; then
294 includedFiles+=("$p")
298 excluded+=("$p")
299 if [[ ! "$p" =~ /$ ]]; then
300 excludedFiles+=("$p")
304 die "Unsupported tree value: ${tree[$p]}"
305 esac
306 done
308 # Test that lib.fileset.toList contains exactly the included files.
309 # The /#/./ part prefixes each element with `./`
310 expectEqual "toList ($fileset)" "sort lessThan [ ${includedFiles[*]/#/./} ]"
312 expression="toSource { root = ./.; fileset = $fileset; }"
314 # We don't have lambdas in bash unfortunately,
315 # so we just define a function instead and then pass its name
316 # shellcheck disable=SC2317
317 run() {
318 # Call toSource with the fileset, triggering open events for all files that are added to the store
319 expectStorePath "$expression"
320 if (( ${#excludedFiles[@]} != 0 )); then
321 rm "${excludedFiles[@]}"
325 # Runs the function while checking that the given excluded files aren't read
326 storePath=$(withFileMonitor run "${excludedFiles[@]}")
328 # For each path that should be included, make sure it does occur in the resulting store path
329 for p in "${included[@]}"; do
330 if [[ ! -e "$storePath/$p" ]]; then
331 die "$expression doesn't include path $p when it should have"
333 done
335 # For each path that should be excluded, make sure it doesn't occur in the resulting store path
336 for p in "${excluded[@]}"; do
337 if [[ -e "$storePath/$p" ]]; then
338 die "$expression included path $p when it shouldn't have"
340 done
342 rm -rf -- *
343 }
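# Illustrative usage (not executed): declare which paths should end up in the resulting
# store path (1) and which shouldn't (0), then pass the file set expression:
#   tree=(
#     [a]=1
#     [b]=0
#   )
#   checkFileset './a'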
346 #### Error messages #####
348 # We're using [[:blank:]] here instead of \s, because only the former is POSIX
349 # (see https://pubs.opengroup.org/onlinepubs/007908799/xbd/re.html#tag_007_003_005).
350 # And indeed, Darwin's bash only supports the former
352 # Absolute paths in strings cannot be passed as `root`
353 expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
354 [[:blank:]]*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
356 expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
357 [[:blank:]]*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
358 [[:blank:]]*Note that this only works for sources created from paths.'
360 # Only paths are accepted as `root`
361 expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
363 # Different filesystem roots in root and fileset are not supported
364 mkdir -p {foo,bar}/mock-root
365 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
366 toSource { root = ./foo/mock-root; fileset = ./bar/mock-root; }
367 ' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \('"$work"'/foo/mock-root\):
368 [[:blank:]]*`root`: Filesystem root is "'"$work"'/foo/mock-root"
369 [[:blank:]]*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
370 [[:blank:]]*Different filesystem roots are not supported.'
371 rm -rf -- *
373 # `root` needs to exist
374 expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a path that does not exist.'
376 # `root` needs to be a file
377 touch a
378 expectFailure 'toSource { root = ./a; fileset = ./a; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a file, but it should be a directory instead. Potential solutions:
379 [[:blank:]]*- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
380 [[:blank:]]*- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as '"$work"', and set `fileset` to the file path.'
381 rm -rf -- *
383 # The fileset argument should be evaluated, even if the directory is empty
384 expectFailure 'toSource { root = ./.; fileset = abort "This should be evaluated"; }' 'evaluation aborted with the following error message: '\''This should be evaluated'\'
386 # Only paths under `root` should be able to influence the result
387 mkdir a
388 expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
389 [[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
390 [[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
391 rm -rf -- *
393 # Non-regular and non-symlink files cannot be added to the Nix store
394 mkfifo a
395 expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` contains a file that cannot be added to the store: '"$work"'/a
396 [[:blank:]]*This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
397 [[:blank:]]*Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.'
398 rm -rf -- *
400 # Path coercion only works for paths
401 expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
402 expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
403 [[:blank:]]*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
404 expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
405 [[:blank:]]*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
406 [[:blank:]]*Note that this only works for sources created from paths.'
408 # Path coercion errors for non-existent paths
409 expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.
410 [[:blank:]]*To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.'
412 # File sets cannot be evaluated directly
413 expectFailure 'union ./. ./.' 'lib.fileset: Directly evaluating a file set is not supported.
414 [[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
415 [[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
416 expectFailure '_emptyWithoutBase' 'lib.fileset: Directly evaluating a file set is not supported.
417 [[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
418 [[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
420 # Past versions of the internal representation are supported
421 expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 0; _internalBase = ./.; }' \
422 '{ _internalBase = ./.; _internalBaseComponents = path.subpath.components (path.splitRoot ./.).subpath; _internalBaseRoot = /.; _internalIsEmptyWithoutBase = false; _internalVersion = 3; _type = "fileset"; }'
423 expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 1; }' \
424 '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'
425 expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 2; }' \
426 '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'
428 # Future versions of the internal representation are unsupported
429 expectFailure '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 4; }' '<tests>: value is a file set created from a future version of the file set library with a different internal representation:
430 [[:blank:]]*- Internal version of the file set: 4
431 [[:blank:]]*- Internal version of the library: 3
432 [[:blank:]]*Make sure to update your Nixpkgs to have a newer version of `lib.fileset`.'
434 # _create followed by _coerce should give the inputs back without any validation
435 expectEqual '{
436 inherit (_coerce "<test>" (_create ./. "directory"))
437 _internalVersion _internalBase _internalTree;
438 }' '{ _internalBase = ./.; _internalTree = "directory"; _internalVersion = 3; }'
440 #### Resulting store path ####
442 # The store path name should be "source"
443 expectEqual 'toSource { root = ./.; fileset = ./.; }' 'sources.cleanSourceWith { name = "source"; src = ./.; }'
445 # We should be able to import an empty directory and end up with an empty result
446 tree=(
448 checkFileset './.'
450 # The empty value without a base should also result in an empty result
451 tree=(
452 [a]=0
454 checkFileset '_emptyWithoutBase'
456 # Directories recursively containing no files are not included
457 tree=(
458 [e/]=0
459 [d/e/]=0
460 [d/d/e/]=0
461 [d/d/f]=1
462 [d/f]=1
463 [f]=1
465 checkFileset './.'
467 # Check trees that could cause a naïve string prefix checking implementation to fail
468 tree=(
469 [a]=0
470 [ab/x]=0
471 [ab/xy]=1
472 [ab/xyz]=0
473 [abc]=0
475 checkFileset './ab/xy'
477 # Check path coercion examples in ../../doc/functions/fileset.section.md
478 tree=(
479 [a/x]=1
480 [a/b/y]=1
481 [c/]=0
482 [c/d/]=0
484 checkFileset './.'
486 tree=(
487 [a/x]=1
488 [a/b/y]=1
489 [c/]=0
490 [c/d/]=0
492 checkFileset './a'
494 tree=(
495 [a/x]=1
496 [a/b/y]=0
497 [c/]=0
498 [c/d/]=0
500 checkFileset './a/x'
502 tree=(
503 [a/x]=0
504 [a/b/y]=1
505 [c/]=0
506 [c/d/]=0
508 checkFileset './a/b'
510 tree=(
511 [a/x]=0
512 [a/b/y]=0
513 [c/]=0
514 [c/d/]=0
516 checkFileset './c'
518 # Test the source filter for the somewhat special case of files in the filesystem root
519 # We can't easily test this with the above functions because we can't write to the filesystem root and we don't want to make any assumptions about which files are there in the sandbox
520 expectEqual '_toSourceFilter (_create /. null) "/foo" ""' 'false'
521 expectEqual '_toSourceFilter (_create /. { foo = "regular"; }) "/foo" ""' 'true'
522 expectEqual '_toSourceFilter (_create /. { foo = null; }) "/foo" ""' 'false'
525 ## lib.fileset.toList
526 # This function is mainly tested in checkFileset
528 # The error context for an invalid argument must be correct
529 expectFailure 'toList null' 'lib.fileset.toList: Argument is of type null, but it should be a file set or a path instead.'
531 # Works for the empty fileset
532 expectEqual 'toList _emptyWithoutBase' '[ ]'
534 # Works on empty paths
535 expectEqual 'toList ./.' '[ ]'
538 ## lib.fileset.union, lib.fileset.unions
541 # Different filesystem roots in root and fileset are not supported
542 mkdir -p {foo,bar}/mock-root
543 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
544 toSource { root = ./.; fileset = union ./foo/mock-root ./bar/mock-root; }
545 ' 'lib.fileset.union: Filesystem roots are not the same:
546 [[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
547 [[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
548 [[:blank:]]*Different filesystem roots are not supported.'
550 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
551 toSource { root = ./.; fileset = unions [ ./foo/mock-root ./bar/mock-root ]; }
552 ' 'lib.fileset.unions: Filesystem roots are not the same:
553 [[:blank:]]*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
554 [[:blank:]]*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
555 [[:blank:]]*Different filesystem roots are not supported.'
556 rm -rf -- *
558 # Coercion errors show the correct context
559 expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: First argument \('"$work"'/a\) is a path that does not exist.'
560 expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: Second argument \('"$work"'/b\) is a path that does not exist.'
561 expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: Element 0 \('"$work"'/a\) is a path that does not exist.'
562 expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: Element 1 \('"$work"'/b\) is a path that does not exist.'
564 # unions needs a list
565 expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Argument is of type null, but it should be a list instead.'
567 # The tree of later arguments should not be evaluated if an earlier argument already includes all files
568 tree=()
569 checkFileset 'union ./. (_create ./. (abort "This should not be used!"))'
570 checkFileset 'unions [ ./. (_create ./. (abort "This should not be used!")) ]'
572 # unions doesn't include any files for an empty list, or for a list of only empty values without a base
573 tree=(
574 [x]=0
575 [y/z]=0
577 checkFileset 'unions [ ]'
578 checkFileset 'unions [ _emptyWithoutBase ]'
579 checkFileset 'unions [ _emptyWithoutBase _emptyWithoutBase ]'
580 checkFileset 'union _emptyWithoutBase _emptyWithoutBase'
582 # The empty value without a base is the left and right identity of union
583 tree=(
584 [x]=1
585 [y/z]=0
587 checkFileset 'union ./x _emptyWithoutBase'
588 checkFileset 'union _emptyWithoutBase ./x'
590 # union doesn't include files that weren't specified
591 tree=(
592 [x]=1
593 [y]=1
594 [z]=0
596 checkFileset 'union ./x ./y'
597 checkFileset 'unions [ ./x ./y ]'
599 # Also for directories
600 tree=(
601 [x/a]=1
602 [x/b]=1
603 [y/a]=1
604 [y/b]=1
605 [z/a]=0
606 [z/b]=0
608 checkFileset 'union ./x ./y'
609 checkFileset 'unions [ ./x ./y ]'
611 # And for very specific paths
612 tree=(
613 [x/a]=1
614 [x/b]=0
615 [y/a]=0
616 [y/b]=1
617 [z/a]=0
618 [z/b]=0
620 checkFileset 'union ./x/a ./y/b'
621 checkFileset 'unions [ ./x/a ./y/b ]'
623 # unions or chained union calls can include more paths
624 tree=(
625 [x/a]=1
626 [x/b]=1
627 [y/a]=1
628 [y/b]=0
629 [z/a]=0
630 [z/b]=1
632 checkFileset 'unions [ ./x/a ./x/b ./y/a ./z/b ]'
633 checkFileset 'union (union ./x/a ./x/b) (union ./y/a ./z/b)'
634 checkFileset 'union (union (union ./x/a ./x/b) ./y/a) ./z/b'
636 # unions should not stack overflow, even if many elements are passed
637 tree=()
638 for i in $(seq 1000); do
639 tree[$i/a]=1
640 tree[$i/b]=0
641 done
642 # This is actually really hard to test:
643 # A lot of files would be needed to cause a stack overflow.
644 # And while we could limit the maximum stack size using `ulimit -s`,
645 # that turns out to not be very deterministic: https://github.com/NixOS/nixpkgs/pull/256417#discussion_r1339396686.
646 # Meanwhile, the test infra here is not the fastest; creating 10000 files would be too slow.
647 # So, just using 1000 files for now.
648 checkFileset 'unions (mapAttrsToList (name: _: ./. + "/${name}/a") (builtins.readDir ./.))'
651 ## lib.fileset.intersection
654 # Different filesystem roots in root and fileset are not supported
655 mkdir -p {foo,bar}/mock-root
656 expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
657 toSource { root = ./.; fileset = intersection ./foo/mock-root ./bar/mock-root; }
658 ' 'lib.fileset.intersection: Filesystem roots are not the same:
659 [[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
660 [[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
661 [[:blank:]]*Different filesystem roots are not supported.'
662 rm -rf -- *
664 # Coercion errors show the correct context
665 expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: First argument \('"$work"'/a\) is a path that does not exist.'
666 expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: Second argument \('"$work"'/b\) is a path that does not exist.'
668 # The tree of later arguments should not be evaluated if an earlier argument already excludes all files
669 tree=(
670 [a]=0
672 checkFileset 'intersection _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
673 # We don't have any combinators that can explicitly remove files yet, so we need to rely on internal functions to test this for now
674 checkFileset 'intersection (_create ./. { a = null; }) (_create ./. { a = abort "This should not be used!"; })'
676 # If either side is empty, the result is empty
677 tree=(
678 [a]=0
680 checkFileset 'intersection _emptyWithoutBase _emptyWithoutBase'
681 checkFileset 'intersection _emptyWithoutBase (_create ./. null)'
682 checkFileset 'intersection (_create ./. null) _emptyWithoutBase'
683 checkFileset 'intersection (_create ./. null) (_create ./. null)'
685 # If the intersection base paths are not overlapping, the result is empty and has no base path
686 mkdir a b c
687 touch {a,b,c}/x
688 expectEqual 'toSource { root = ./c; fileset = intersection ./a ./b; }' 'toSource { root = ./c; fileset = _emptyWithoutBase; }'
689 rm -rf -- *
691 # If the intersection exists, the resulting base path is the longer of the two
692 mkdir a
693 touch x a/b
694 expectEqual 'toSource { root = ./a; fileset = intersection ./a ./.; }' 'toSource { root = ./a; fileset = ./a; }'
695 expectEqual 'toSource { root = ./a; fileset = intersection ./. ./a; }' 'toSource { root = ./a; fileset = ./a; }'
696 rm -rf -- *
698 # Also finds the intersection with null'ed filesetTrees
699 tree=(
700 [a]=0
701 [b]=1
702 [c]=0
704 checkFileset 'intersection (_create ./. { a = "regular"; b = "regular"; c = null; }) (_create ./. { a = null; b = "regular"; c = "regular"; })'
706 # Actually computes the intersection between files
707 tree=(
708 [a]=0
709 [b]=0
710 [c]=1
711 [d]=1
712 [e]=0
713 [f]=0
715 checkFileset 'intersection (unions [ ./a ./b ./c ./d ]) (unions [ ./c ./d ./e ./f ])'
717 tree=(
718 [a/x]=0
719 [a/y]=0
720 [b/x]=1
721 [b/y]=1
722 [c/x]=0
723 [c/y]=0
725 checkFileset 'intersection ./b ./.'
726 checkFileset 'intersection ./b (unions [ ./a/x ./a/y ./b/x ./b/y ./c/x ./c/y ])'
728 # Complicated case
729 tree=(
730 [a/x]=0
731 [a/b/i]=1
732 [c/d/x]=0
733 [c/d/f]=1
734 [c/x]=0
735 [c/e/i]=1
736 [c/e/j]=1
738 checkFileset 'intersection (unions [ ./a/b ./c/d ./c/e ]) (unions [ ./a ./c/d/f ./c/e ])'
740 ## Difference
742 # Subtracting something from itself results in nothing
743 tree=(
744 [a]=0
746 checkFileset 'difference ./. ./.'
748 # The tree of the second argument should not be evaluated if not needed
749 checkFileset 'difference _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
750 checkFileset 'difference (_create ./. null) (_create ./. (abort "This should not be used!"))'
752 # Subtracting nothing gives the same thing back
753 tree=(
754 [a]=1
756 checkFileset 'difference ./. _emptyWithoutBase'
757 checkFileset 'difference ./. (_create ./. null)'
759 # Subtracting doesn't influence the base path
760 mkdir a b
761 touch {a,b}/x
762 expectEqual 'toSource { root = ./a; fileset = difference ./a ./b; }' 'toSource { root = ./a; fileset = ./a; }'
763 rm -rf -- *
765 # Also not the other way around
766 mkdir a
767 expectFailure 'toSource { root = ./a; fileset = difference ./. ./a; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
768 [[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
769 [[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
770 rm -rf -- *
772 # Difference actually works
773 # We test all combinations of ./., ./a, ./a/x and ./b
774 tree=(
775 [a/x]=0
776 [a/y]=0
777 [b]=0
778 [c]=0
780 checkFileset 'difference ./. ./.'
781 checkFileset 'difference ./a ./.'
782 checkFileset 'difference ./a/x ./.'
783 checkFileset 'difference ./b ./.'
784 checkFileset 'difference ./a ./a'
785 checkFileset 'difference ./a/x ./a'
786 checkFileset 'difference ./a/x ./a/x'
787 checkFileset 'difference ./b ./b'
788 tree=(
789 [a/x]=0
790 [a/y]=0
791 [b]=1
792 [c]=1
794 checkFileset 'difference ./. ./a'
795 tree=(
796 [a/x]=1
797 [a/y]=1
798 [b]=0
799 [c]=0
801 checkFileset 'difference ./a ./b'
802 tree=(
803 [a/x]=1
804 [a/y]=0
805 [b]=0
806 [c]=0
808 checkFileset 'difference ./a/x ./b'
809 tree=(
810 [a/x]=0
811 [a/y]=1
812 [b]=0
813 [c]=0
815 checkFileset 'difference ./a ./a/x'
816 tree=(
817 [a/x]=0
818 [a/y]=0
819 [b]=1
820 [c]=0
822 checkFileset 'difference ./b ./a'
823 checkFileset 'difference ./b ./a/x'
824 tree=(
825 [a/x]=0
826 [a/y]=1
827 [b]=1
828 [c]=1
830 checkFileset 'difference ./. ./a/x'
831 tree=(
832 [a/x]=1
833 [a/y]=1
834 [b]=0
835 [c]=1
837 checkFileset 'difference ./. ./b'
839 ## File filter
841 # The first argument needs to be a function
842 expectFailure 'fileFilter null (abort "this is not needed")' 'lib.fileset.fileFilter: First argument is of type null, but it should be a function instead.'
844 # The second argument needs to be an existing path
845 expectFailure 'fileFilter (file: abort "this is not needed") _emptyWithoutBase' 'lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
846 [[:blank:]]*If you need to filter files in a file set, use `intersection fileset \(fileFilter pred \./\.\)` instead.'
847 expectFailure 'fileFilter (file: abort "this is not needed") null' 'lib.fileset.fileFilter: Second argument is of type null, but it should be a path instead.'
848 expectFailure 'fileFilter (file: abort "this is not needed") ./a' 'lib.fileset.fileFilter: Second argument \('"$work"'/a\) is a path that does not exist.'
850 # The predicate is not called when there are no files
851 tree=()
852 checkFileset 'fileFilter (file: abort "this is not needed") ./.'
854 # The predicate must be able to handle extra attributes
855 touch a
856 expectFailure 'toSource { root = ./.; fileset = fileFilter ({ name, type, hasExt }: true) ./.; }' 'called with unexpected argument '\''"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you'\''re using `\{ name, file, hasExt \}:`, use `\{ name, file, hasExt, ... \}:` instead."'\'
857 rm -rf -- *
859 # .name is the name, and it works correctly, even recursively
860 tree=(
861 [a]=1
862 [b]=0
863 [c/a]=1
864 [c/b]=0
865 [d/c/a]=1
866 [d/c/b]=0
868 checkFileset 'fileFilter (file: file.name == "a") ./.'
869 tree=(
870 [a]=0
871 [b]=1
872 [c/a]=0
873 [c/b]=1
874 [d/c/a]=0
875 [d/c/b]=1
877 checkFileset 'fileFilter (file: file.name != "a") ./.'
879 # `.type` is the file type
880 mkdir d
881 touch d/a
882 ln -s d/b d/b
883 mkfifo d/c
884 expectEqual \
885 'toSource { root = ./.; fileset = fileFilter (file: file.type == "regular") ./.; }' \
886 'toSource { root = ./.; fileset = ./d/a; }'
887 expectEqual \
888 'toSource { root = ./.; fileset = fileFilter (file: file.type == "symlink") ./.; }' \
889 'toSource { root = ./.; fileset = ./d/b; }'
890 expectEqual \
891 'toSource { root = ./.; fileset = fileFilter (file: file.type == "unknown") ./.; }' \
892 'toSource { root = ./.; fileset = ./d/c; }'
893 expectEqual \
894 'toSource { root = ./.; fileset = fileFilter (file: file.type != "regular") ./.; }' \
895 'toSource { root = ./.; fileset = union ./d/b ./d/c; }'
896 expectEqual \
897 'toSource { root = ./.; fileset = fileFilter (file: file.type != "symlink") ./.; }' \
898 'toSource { root = ./.; fileset = union ./d/a ./d/c; }'
899 expectEqual \
900 'toSource { root = ./.; fileset = fileFilter (file: file.type != "unknown") ./.; }' \
901 'toSource { root = ./.; fileset = union ./d/a ./d/b; }'
902 rm -rf -- *
904 # Check that .hasExt checks for the file extension
905 # Having the empty extension is the same as the file name ending with a `.`
906 tree=(
907 [a]=0
908 [a.]=1
909 [a.b]=0
910 [a.b.]=1
911 [a.b.c]=0
913 checkFileset 'fileFilter (file: file.hasExt "") ./.'
915 # It can check for the last extension
916 tree=(
917 [a]=0
918 [.a]=1
919 [.a.]=0
920 [.b.a]=1
921 [.b.a.]=0
923 checkFileset 'fileFilter (file: file.hasExt "a") ./.'
925 # It can check for any extension
926 tree=(
927 [a.b.c.d]=1
929 checkFileset 'fileFilter (file:
930 all file.hasExt [
931 "b.c.d"
932 "c.d"
935 ) ./.'
937 # It's lazy
938 tree=(
939 [b]=1
940 [c/a]=1
942 # Note that union evaluates the first argument first if necessary, which is why we can use ./c/a here
943 checkFileset 'union ./c/a (fileFilter (file: assert file.name != "a"; true) ./.)'
944 # but here we need to use ./c
945 checkFileset 'union (fileFilter (file: assert file.name != "a"; true) ./.) ./c'
947 # Make sure single files are filtered correctly
948 tree=(
949 [a]=1
950 [b]=0
952 checkFileset 'fileFilter (file: assert file.name == "a"; true) ./a'
953 tree=(
954 [a]=0
955 [b]=0
957 checkFileset 'fileFilter (file: assert file.name == "a"; false) ./a'
959 ## Tracing
961 # The second trace argument is returned
962 expectEqual 'trace ./. "some value"' 'builtins.trace "(empty)" "some value"'
964 # The fileset traceVal argument is returned
965 expectEqual 'traceVal ./.' 'builtins.trace "(empty)" (_create ./. "directory")'
967 # The tracing happens before the final argument is needed
968 expectEqual 'trace ./.' 'builtins.trace "(empty)" (x: x)'
970 # Tracing an empty directory shows it as such
971 expectTrace './.' '(empty)'
973 # This also works if there are directories, as long as they recursively contain no files
974 mkdir -p a/b/c
975 expectTrace './.' '(empty)'
976 rm -rf -- *
978 # The empty file set without a base also prints as empty
979 expectTrace '_emptyWithoutBase' '(empty)'
980 expectTrace 'unions [ ]' '(empty)'
981 mkdir foo bar
982 touch {foo,bar}/x
983 expectTrace 'intersection ./foo ./bar' '(empty)'
984 rm -rf -- *
986 # If a directory is fully included, print it as such
987 touch a
988 expectTrace './.' "$work"' (all files in directory)'
989 rm -rf -- *
991 # If a directory is not fully included, recurse
992 mkdir a b
993 touch a/{x,y} b/{x,y}
994 expectTrace 'union ./a/x ./b' "$work"'
996 - x (regular)
997 - b (all files in directory)'
998 rm -rf -- *
1000 # If an included path is a file, print its type
1001 touch a x
1002 ln -s a b
1003 mkfifo c
1004 expectTrace 'unions [ ./a ./b ./c ]' "$work"'
1005 - a (regular)
1006 - b (symlink)
1007 - c (unknown)'
1008 rm -rf -- *
1010 # Do not print directories that recursively contain no files
1011 mkdir -p a/b/c
1012 touch b x
1013 expectTrace 'unions [ ./a ./b ]' "$work"'
1014 - b (regular)'
1015 rm -rf -- *
1017 # If all children are either fully included or empty directories,
1018 # the parent should be printed as fully included
1019 touch a
1020 mkdir b
1021 expectTrace 'union ./a ./b' "$work"' (all files in directory)'
1022 rm -rf -- *
1024 mkdir -p x/b x/c
1025 touch x/a
1026 touch a
1027 # If all children are either fully excluded or empty directories,
1028 # the parent should be shown (or rather not shown) as fully excluded
1029 expectTrace 'unions [ ./a ./x/b ./x/c ]' "$work"'
1030 - a (regular)'
1031 rm -rf -- *
1033 # Completely filtered out directories also print as empty
1034 touch a
1035 expectTrace '_create ./. {}' '(empty)'
1036 rm -rf -- *
1038 # A general test to make sure the resulting format makes sense
1039 # Such as indentation and ordering
1040 mkdir -p bar/{qux,someDir}
1041 touch bar/{baz,qux,someDir/a} foo
1042 touch bar/qux/x
1043 ln -s x bar/qux/a
1044 mkfifo bar/qux/b
1045 expectTrace 'unions [
1046 ./bar/baz
1047 ./bar/qux/a
1048 ./bar/qux/b
1049 ./bar/someDir/a
1050 ./foo
1051 ]' "$work"'
1052 - bar
1053 - baz (regular)
1054 - qux
1055 - a (symlink)
1056 - b (unknown)
1057 - someDir (all files in directory)
1058 - foo (regular)'
1059 rm -rf -- *
1061 # For recursively included directories,
1062 # `(all files in directory)` should only be used if there's at least one file (otherwise it would be `(empty)`)
1063 # and this should be determined without doing a full search
1065 # a is intentionally ordered first here in order to allow triggering the short-circuit behavior
1066 # We then check that b is not read
1067 # In a more realistic scenario, some directories might need to be recursed into,
1068 # but a file would be quickly found to trigger the short-circuit.
1069 touch a
1070 mkdir b
1071 # We don't have lambdas in bash unfortunately,
1072 # so we just define a function instead and then pass its name
1073 # shellcheck disable=SC2317
1074 run() {
1075 # This shouldn't read b/
1076 expectTrace './.' "$work"' (all files in directory)'
1077 # Remove all files immediately after, triggering delete_self events for all of them
1078 rmdir b
1080 # Runs the function while checking that b isn't read
1081 withFileMonitor run b
1082 rm -rf -- *
1084 # Partially included directories trace entries as they are evaluated
1085 touch a b c
1086 expectTrace '_create ./. { a = null; b = "regular"; c = throw "b"; }' "$work"'
1087 - b (regular)'
1089 # Except entries that need to be evaluated to even figure out if it's only partially included:
1090 # Here the directory could be fully excluded or included just from seeing a and b,
1091 # so c needs to be evaluated before anything can be traced
1092 expectTrace '_create ./. { a = null; b = null; c = throw "c"; }' ''
1093 expectTrace '_create ./. { a = "regular"; b = "regular"; c = throw "c"; }' ''
1094 rm -rf -- *
1096 # We can trace large directories (10000 here) without any problems
1097 filesToCreate=({0..9}{0..9}{0..9}{0..9})
1098 expectedTrace=$work$'\n'$(printf -- '- %s (regular)\n' "${filesToCreate[@]}")
1099 # We need an excluded file so it doesn't print as `(all files in directory)`
1100 touch 0 "${filesToCreate[@]}"
1101 expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"
1102 rm -rf -- *
1104 ## lib.fileset.fromSource
1106 # Check error messages
1108 # String-like values are not supported
1109 expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
1110 [[:blank:]]*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
1112 # Wrong type
1113 expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
1114 expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
1116 # fromSource on non-existent paths gives an error
1117 expectFailure 'fromSource ./a' 'lib.fileset.fromSource: The source origin \('"$work"'/a\) of the argument is a path that does not exist.'
1119 # fromSource on a path works and is the same as coercing that path
1120 mkdir a
1121 touch a/b c
1122 expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
1123 rm -rf -- *
1125 # Check that converting to a file set doesn't read the included files
1126 mkdir a
1127 touch a/b
1128 run() {
1129 expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
1130 rm a/b
1132 withFileMonitor run a/b
1133 rm -rf -- *
1135 # Check that converting to a file set doesn't read entries for directories that are filtered out
1136 mkdir -p a/b
1137 touch a/b/c
1138 run() {
1139 expectEqual "trace (fromSource (lib.cleanSourceWith {
1140 src = ./a;
1141 filter = pathString: type: false;
1142 })) null" "builtins.trace \"(empty)\" null"
1143 rm a/b/c
1144 rmdir a/b
1146 withFileMonitor run a/b
1147 rm -rf -- *
1149 # The filter is not needed on empty directories
1150 expectEqual 'trace (fromSource (lib.cleanSourceWith {
1151 src = ./.;
1152 filter = abort "filter should not be needed";
1153 })) null' 'trace _emptyWithoutBase null'
1155 # Single files also work
1156 touch a b
1157 expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
1158 rm -rf -- *
1160 # For a tree assigning each subpath true/false,
1161 # check whether a source filter with those results includes the same files
1162 # as a file set created using fromSource. Usage:
1164 # tree=(
1165 #   [a]=1   # ./a is a file and the filter should return true for it
1166 #   [b/]=0  # ./b is a directory and the filter should return false for it
1167 # )
1168 # checkSource
1169 checkSource() {
1170 createTree
1172 # Serialise the tree as JSON (there are only minimal savings with jq,
1173 # and we don't need to handle escapes)
1175 echo "{"
1176 first=1
1177 for p in "${!tree[@]}"; do
1178 if [[ -z "$first" ]]; then
1179 echo ","
1180 else
1181 first=
1183 echo "\"$p\":"
1184 case "${tree[$p]}" in
1186 echo "true"
1189 echo "false"
1192 die "Unsupported tree value: ${tree[$p]}"
1193 esac
1194 done
1195 echo "}"
1196 } > "$tmp/tree.json"
1198 # An expression to create a source value with a filter matching the tree
1199 sourceExpr='
1201 tree = importJSON '"$tmp"'/tree.json;
1203 cleanSourceWith {
1204 src = ./.;
1205 filter =
1206 pathString: type:
1208 stripped = removePrefix (toString ./. + "/") pathString;
1209 key = stripped + optionalString (type == "directory") "/";
1211 tree.${key} or
1212 (throw "tree key ${key} missing");
1216 filesetExpr='
1217 toSource {
1218 root = ./.;
1219 fileset = fromSource ('"$sourceExpr"');
1223 # Turn both into store paths
1224 sourceStorePath=$(expectStorePath "$sourceExpr")
1225 filesetStorePath=$(expectStorePath "$filesetExpr")
1227 # Loop through each path in the tree
1228 while IFS= read -r -d $'\0' subpath; do
1229 if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
1230 # If it's not in the source store path, it's also not in the file set store path
1231 if [[ -e "$filesetStorePath"/"$subpath" ]]; then
1232 die "The store path $sourceStorePath created by $expr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
1234 elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
1235 # If it's an empty directory in the source store path, it shouldn't be in the file set store path
1236 if [[ -e "$filesetStorePath"/"$subpath" ]]; then
1237 die "The store path $sourceStorePath created by $expr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
1239 else
1240 # If it's non-empty directory or a file, it should be in the file set store path
1241 if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
1242 die "The store path $sourceStorePath created by $expr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
1245 done < <(find . -mindepth 1 -print0)
1247 rm -rf -- *
1248 }
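# Illustrative usage (not executed): assign each subpath the boolean the source filter
# should return, then compare the filtered source against fromSource:
#   tree=(
#     [a]=1
#     [b/]=0
#   )
#   checkSource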
1250 # Check whether the filter is evaluated correctly
1251 tree=(
1252 [a]=
1253 [b/]=
1254 [b/c]=
1255 [b/d]=
1256 [e/]=
1257 [e/e/]=
1259 # We fill out the above tree values with all possible combinations of 0 and 1
1260 # Then check whether a filter based on those return values gets turned into the corresponding file set
1261 for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
1262 for p in "${!tree[@]}"; do
1263 tree[$p]=$(( i % 2 ))
1264 (( i /= 2 )) || true
1265 done
1266 checkSource
1267 done
1269 # The filter is called with the same arguments in the same order
1270 mkdir a e
1271 touch a/b a/c d e
1272 expectEqual '
1273 trace (fromSource (cleanSourceWith {
1274 src = ./.;
1275 filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
1276 })) null
1278 builtins.seq (cleanSourceWith {
1279 src = ./.;
1280 filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
1281 }).outPath
1282 builtins.trace "'"$work"' (all files in directory)"
1283 null
1285 rm -rf -- *
1287 # Test that if a directory is not included, the filter isn't called on its contents
1288 mkdir a b
1289 touch a/c b/d
1290 expectEqual 'trace (fromSource (cleanSourceWith {
1291 src = ./.;
1292 filter = pathString: type:
1293 if pathString == toString ./a then
1294 false
1295 else if pathString == toString ./b then
1296 true
1297 else if pathString == toString ./b/d then
1298 true
1299 else
1300 abort "This filter should not be called with path ${pathString}";
1301 })) null' 'trace (_create ./. { b = "directory"; }) null'
1302 rm -rf -- *
1304 # The filter is called lazily:
1305 # If a later operation (say, an intersection) removes a part of the tree, the filter won't run on it
1306 mkdir a d
1307 touch a/{b,c} d/e
1308 expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
1309 src = ./.;
1310 filter = pathString: type:
1311 if pathString == toString ./a || pathString == toString ./a/b then
1312 true
1313 else if pathString == toString ./a/c then
1314 false
1315 else
1316 abort "filter should not be called on ${pathString}";
1317 }))) null' 'trace ./a/b null'
1318 rm -rf -- *
1320 ## lib.fileset.gitTracked/gitTrackedWith
1322 # The first/second argument has to be a path
1323 expectFailure 'gitTracked null' 'lib.fileset.gitTracked: Expected the argument to be a path, but it'\''s a null instead.'
1324 expectFailure 'gitTrackedWith {} null' 'lib.fileset.gitTrackedWith: Expected the second argument to be a path, but it'\''s a null instead.'
1326 # The path must be a directory
1327 touch a
1328 expectFailure 'gitTracked ./a' 'lib.fileset.gitTracked: Expected the argument \('"$work"'/a\) to be a directory, but it'\''s a file instead'
1329 expectFailure 'gitTrackedWith {} ./a' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'/a\) to be a directory, but it'\''s a file instead'
1330 rm -rf -- *
1332 # The path has to contain a .git directory
1333 expectFailure 'gitTracked ./.' 'lib.fileset.gitTracked: Expected the argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
1334 expectFailure 'gitTrackedWith {} ./.' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
1336 # recurseSubmodules has to be a boolean
1337 expectFailure 'gitTrackedWith { recurseSubmodules = null; } ./.' 'lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it'\''s a null instead.'
1339 # recurseSubmodules = true is not supported on all Nix versions
1340 if [[ "$(nix-instantiate --eval --expr "$prefixExpression (versionAtLeast builtins.nixVersion _fetchGitSubmodulesMinver)")" == true ]]; then
1341 fetchGitSupportsSubmodules=1
1342 else
1343 fetchGitSupportsSubmodules=
1344 expectFailure 'gitTrackedWith { recurseSubmodules = true; } ./.' 'lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version 2.4 and after, but Nix version [0-9.]+ is used.'
1347 # Checks that `gitTrackedWith` contains the same files as `git ls-files`
1348 # for the current working directory.
1349 # If --recurse-submodules is passed, the flag is passed through to `git ls-files`
1350 # and as `recurseSubmodules` to `gitTrackedWith`
1351 checkGitTrackedWith() {
1352 if [[ "${1:-}" == "--recurse-submodules" ]]; then
1353 gitLsFlags="--recurse-submodules"
1354 gitTrackedArg="{ recurseSubmodules = true; }"
1355 else
1356 gitLsFlags=""
1357 gitTrackedArg="{ }"
1360 # All files listed by `git ls-files`
1361 expectedFiles=()
1362 while IFS= read -r -d $'\0' file; do
1363 # If there are submodules but --recurse-submodules isn't passed,
1364 # `git ls-files` lists them as empty directories,
1365 # we need to filter that out since we only want to check/count files
1366 if [[ -f "$file" ]]; then
1367 expectedFiles+=("$file")
1369 done < <(git ls-files -z $gitLsFlags)
1371 storePath=$(expectStorePath 'toSource { root = ./.; fileset = gitTrackedWith '"$gitTrackedArg"' ./.; }')
1373 # Check that each expected file is also in the store path with the same content
1374 for expectedFile in "${expectedFiles[@]}"; do
1375 if [[ ! -e "$storePath"/"$expectedFile" ]]; then
1376 die "Expected file $expectedFile to exist in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1378 if ! diff "$expectedFile" "$storePath"/"$expectedFile"; then
1379 die "Expected file $expectedFile to have the same contents as in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1381 done
1383 # This is a cheap way to verify the inverse: That all files in the store path are also expected
1384 # We just count the number of files in both and verify they're the same
1385 actualFileCount=$(find "$storePath" -type f -printf . | wc -c)
1386 if [[ "${#expectedFiles[@]}" != "$actualFileCount" ]]; then
1387 die "Expected ${#expectedFiles[@]} files in $storePath, but got $actualFileCount.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
1392 # Runs checkGitTrackedWith with and without --recurse-submodules
1393 # Allows testing both variants together
1394 checkGitTracked() {
1395 checkGitTrackedWith
1396 if [[ -n "$fetchGitSupportsSubmodules" ]]; then
1397 checkGitTrackedWith --recurse-submodules
1401 createGitRepo() {
1402 git init -q "$1"
1403 # Only repo-local config
1404 git -C "$1" config user.name "Nixpkgs"
1405 git -C "$1" config user.email "nixpkgs@nixos.org"
1406 # Get at least a HEAD commit, needed for older Nix versions
1407 git -C "$1" commit -q --allow-empty -m "Empty commit"
1408 }
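# Illustrative only (not executed): the Git tests below follow this pattern of creating a
# repository, mutating it and re-checking after every step:
#   createGitRepo .
#   echo a > a
#   git add a
#   checkGitTracked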
1410 # Check that gitTracked[With] works as expected when evaluated out-of-tree
1412 ## First we create a git repository (and a subrepository) with `default.nix` files referring to their local paths
1413 ## Simulating how it would be used in the wild
1414 createGitRepo .
1415 echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTracked ./.; }' > default.nix
1416 git add .
1418 ## We can evaluate it locally just fine, `fetchGit` is used underneath to filter git-tracked files
1419 expectEqual '(import ./. { fs = lib.fileset; }).outPath' '(builtins.fetchGit ./.).outPath'
1421 ## We can also evaluate when importing from fetched store paths
1422 storePath=$(expectStorePath 'builtins.fetchGit ./.')
1423 expectEqual '(import '"$storePath"' { fs = lib.fileset; }).outPath' \""$storePath"\"
1425 ## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
1426 expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*\) is a store path within a working tree of a Git repository.
1427 [[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
1428 [[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
1429 [[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
1430 [[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
1432 ## Even with submodules
1433 if [[ -n "$fetchGitSupportsSubmodules" ]]; then
1434 ## Both the main repo and the submodule
1435 echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTrackedWith { recurseSubmodules = true; } ./.; }' > default.nix
1436 createGitRepo sub
1437 git submodule add ./sub sub >/dev/null
1438 ## But also the submodule itself
1439 echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTracked ./.; }' > sub/default.nix
1440 git -C sub add .
1442 ## We can evaluate it locally just fine, `fetchGit` is used underneath to filter git-tracked files
1443 expectEqual '(import ./. { fs = lib.fileset; }).outPath' '(builtins.fetchGit { url = ./.; submodules = true; }).outPath'
1444 expectEqual '(import ./sub { fs = lib.fileset; }).outPath' '(builtins.fetchGit ./sub).outPath'
1446 ## We can also evaluate when importing from fetched store paths
1447 storePathWithSub=$(expectStorePath 'builtins.fetchGit { url = ./.; submodules = true; }')
1448 expectEqual '(import '"$storePathWithSub"' { fs = lib.fileset; }).outPath' \""$storePathWithSub"\"
1449 storePathSub=$(expectStorePath 'builtins.fetchGit ./sub')
1450 expectEqual '(import '"$storePathSub"' { fs = lib.fileset; }).outPath' \""$storePathSub"\"
1452 ## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
1453 expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTrackedWith: The second argument \(.*\) is a store path within a working tree of a Git repository.
1454 [[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
1455 [[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
1456 [[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
1457 [[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
1458 expectFailure 'import "${./.}/sub" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*/sub\) is a store path within a working tree of a Git repository.
1459 [[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
1460 [[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
1461 [[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
1462 [[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
1464 rm -rf -- *
1466 # shallow = true is not supported on all Nix versions
1467 # and older versions don't support shallow clones at all
1468 if [[ "$(nix-instantiate --eval --expr "$prefixExpression (versionAtLeast builtins.nixVersion _fetchGitShallowMinver)")" == true ]]; then
1469 createGitRepo full
1470 # Extra commit such that there's a commit that won't be in the shallow clone
1471 git -C full commit --allow-empty -q -m extra
1472 git clone -q --depth 1 "file://${PWD}/full" shallow
1473 cd shallow
1474 checkGitTracked
1475 cd ..
1476 rm -rf -- *
1479 # Go through all stages of Git files
1480 # See https://www.git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository
1482 # Empty repository
1483 createGitRepo .
1484 checkGitTracked
1486 # Untracked file
1487 echo a > a
1488 checkGitTracked
1490 # Staged file
1491 git add a
1492 checkGitTracked
1494 # Committed file
1495 git commit -q -m "Added a"
1496 checkGitTracked
1498 # Edited file
1499 echo b > a
1500 checkGitTracked
1502 # Removed file
1503 git rm -f -q a
1504 checkGitTracked
1506 rm -rf -- *
1508 # gitignored file
1509 createGitRepo .
1510 echo a > .gitignore
1511 touch a
1512 git add -A
1513 checkGitTracked
1515 # Add it regardless (needs -f)
1516 git add -f a
1517 checkGitTracked
1518 rm -rf -- *
1520 # Directory
1521 createGitRepo .
1522 mkdir -p d1/d2/d3
1523 touch d1/d2/d3/a
1524 git add d1
1525 checkGitTracked
1526 rm -rf -- *
1528 # Submodules
1529 createGitRepo .
1530 createGitRepo sub
1532 # Untracked submodule
1533 git -C sub commit -q --allow-empty -m "Empty commit"
1534 checkGitTracked
1536 # Tracked submodule
1537 git submodule add ./sub sub >/dev/null
1538 checkGitTracked
1540 # Untracked file
1541 echo a > sub/a
1542 checkGitTracked
1544 # Staged file
1545 git -C sub add a
1546 checkGitTracked
1548 # Committed file
1549 git -C sub commit -q -m "Add a"
1550 checkGitTracked
1552 # Changed file
1553 echo b > sub/b
1554 checkGitTracked
1556 # Removed file
1557 git -C sub rm -f -q a
1558 checkGitTracked
1560 rm -rf -- *
1562 ## lib.fileset.maybeMissing
1564 # Argument must be a path
1565 expectFailure 'maybeMissing "someString"' 'lib.fileset.maybeMissing: Argument \("someString"\) is a string-like value, but it should be a path instead.'
1566 expectFailure 'maybeMissing null' 'lib.fileset.maybeMissing: Argument is of type null, but it should be a path instead.'
1568 tree=(
1570 checkFileset 'maybeMissing ./a'
1571 checkFileset 'maybeMissing ./b'
1572 checkFileset 'maybeMissing ./b/c'
1574 # Works on single files
1575 tree=(
1576 [a]=1
1577 [b/c]=0
1578 [b/d]=0
1580 checkFileset 'maybeMissing ./a'
1581 tree=(
1582 [a]=0
1583 [b/c]=1
1584 [b/d]=0
1586 checkFileset 'maybeMissing ./b/c'
1588 # Works on directories
1589 tree=(
1590 [a]=0
1591 [b/c]=1
1592 [b/d]=1
1594 checkFileset 'maybeMissing ./b'
1596 # TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets
1598 echo >&2 tests ok