# shellcheck disable=SC2016
# shellcheck disable=SC2317
# shellcheck disable=SC2192
# [nixpkgs]$ lib/fileset/tests.sh
# [nixpkgs]$ nix-build lib/tests/release.nix
shopt -s inherit_errexit dotglob

# The second to last entry contains the line number of the top-level caller
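# For example, if a test case at line 300 of this file calls expectEqual, which in turn
# calls die, then BASH_LINENO is roughly (<call line inside expectEqual> 300 0), so the
# index ${#BASH_LINENO[@]} - 2 picks out 300 (illustrative line numbers only).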
lineIndex=$(( ${#BASH_LINENO[@]} - 2 ))
echo >&2 -e "test case at ${BASH_SOURCE[0]}:${BASH_LINENO[$lineIndex]} failed:" "$@"

if test -n "${TEST_LIB:-}"; then
    NIX_PATH=nixpkgs="$(dirname "$TEST_LIB")"
else
    NIX_PATH=nixpkgs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.."; pwd)"
fi

trap clean_up EXIT SIGINT SIGTERM

# Crudely unquotes a JSON string by just taking everything between the first and the second quote.
# We're only using this for resulting /nix/store paths, which can't contain " anyway,
# nor can they contain any other characters that would need to be escaped specially in JSON
# This way we don't need to add a dependency on e.g. jq
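# A minimal definition along those lines could look like this (sketch: it simply prints
# the second "-delimited field of its stdin, i.e. the text between the first two quotes):
crudeUnquoteJSON() {
    cut -d \" -f2
}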
  lib = import <nixpkgs/lib>;
  internal = import <nixpkgs/lib/fileset/internal.nix> {
# Check that two Nix expressions successfully evaluate to the same value.
# The expressions have `lib.fileset` in scope.
# Usage: expectEqual NIX NIX
if actualResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/actualStderr \
    --expr "$prefixExpression ($actualExpr)"); then

actualStderr=$(< "$tmp"/actualStderr)

if expectedResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/expectedStderr \
    --expr "$prefixExpression ($expectedExpr)"); then

expectedStderr=$(< "$tmp"/expectedStderr)

if [[ "$actualExitCode" != "$expectedExitCode" ]]; then
    echo "$actualStderr" >&2
    echo "$actualResult" >&2
    die "$actualExpr should have exited with $expectedExitCode, but it exited with $actualExitCode"
fi

if [[ "$actualResult" != "$expectedResult" ]]; then
    die "$actualExpr should have evaluated to $expectedExpr:\n$expectedResult\n\nbut it evaluated to\n$actualResult"
fi

if [[ "$actualStderr" != "$expectedStderr" ]]; then
    die "$actualExpr should have had this on stderr:\n$expectedStderr\n\nbut it was\n$actualStderr"
fi

# Check that a Nix expression evaluates successfully to a store path and returns it (without quotes).
# The expression has `lib.fileset` in scope.
# Usage: expectStorePath NIX
if ! result=$(nix-instantiate --eval --strict --json --read-write-mode --show-trace 2>"$tmp"/stderr \
    --expr "$prefixExpression ($expr)"); then
    cat "$tmp/stderr" >&2
    die "$expr failed to evaluate, but it was expected to succeed"
fi
# This is safe because we assume we get back a store path in a string
crudeUnquoteJSON <<< "$result"
# Check that a Nix expression fails to evaluate (strictly, read-write-mode)
# and check the received stderr against a regex
# The expression has `lib.fileset` in scope.
# Usage: expectFailure NIX REGEX
local expectedErrorRegex=$2
if result=$(nix-instantiate --eval --strict --read-write-mode --show-trace 2>"$tmp/stderr" \
    --expr "$prefixExpression $expr"); then
    die "$expr evaluated successfully to $result, but it was expected to fail"
fi
stderr=$(<"$tmp/stderr")
if [[ ! "$stderr" =~ $expectedErrorRegex ]]; then
    die "$expr should have errored with this regex pattern:\n\n$expectedErrorRegex\n\nbut this was the actual error:\n\n$stderr"
# Check that the traces of a Nix expression are as expected when evaluated.
# The expression has `lib.fileset` in scope.
# Usage: expectTrace NIX STR
local expectedTrace=$2

nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTrace \
    --expr "$prefixExpression trace ($expr)" || true

actualTrace=$(sed -n 's/^trace: //p' "$tmp/stderrTrace")

nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTraceVal \
    --expr "$prefixExpression traceVal ($expr)" || true

actualTraceVal=$(sed -n 's/^trace: //p' "$tmp/stderrTraceVal")

# Test that traceVal returns the same trace as trace
if [[ "$actualTrace" != "$actualTraceVal" ]]; then
    cat "$tmp"/stderrTrace >&2
    die "$expr traced this for lib.fileset.trace:\n\n$actualTrace\n\nand something different for lib.fileset.traceVal:\n\n$actualTraceVal"
fi

if [[ "$actualTrace" != "$expectedTrace" ]]; then
    cat "$tmp"/stderrTrace >&2
    die "$expr should have traced this:\n\n$expectedTrace\n\nbut this was actually traced:\n\n$actualTrace"
# We conditionally use inotifywait in withFileMonitor.
# Check early whether it's available
# TODO: Darwin support, though not crucial since we have Linux CI
if type inotifywait 2>/dev/null >/dev/null; then
    canMonitor=1
else
    echo "Warning: Cannot check for paths not getting read since the inotifywait command (from the inotify-tools package) is not available" >&2
# Run a function while monitoring that it doesn't read certain paths
# Usage: withFileMonitor FUNNAME PATH...
# - FUNNAME should be a bash function that:
#   - Performs some operation that should not read some paths
#   - Deletes the paths it shouldn't read without triggering any open events
# - PATH... are the paths that should not get read
#
# This function outputs the same as FUNNAME
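# For illustration only (hypothetical function and file names, not one of the tests below):
#   frobnicate() {
#       cat ./public.txt   # allowed: not a monitored path
#       rm ./secret.txt    # deletes the monitored path without opening it
#   }
#   withFileMonitor frobnicate ./secret.txt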
# If we can't monitor files or have none to monitor, just run the function directly
if [[ -z "$canMonitor" ]] || (( "$#" == 0 )); then

# Use a subshell to start the coprocess in and use a trap to kill it when exiting the subshell

# Assigned by coproc, makes shellcheck happy
local watcher watcher_PID
# Start inotifywait in the background to monitor all excluded paths

# inotifywait outputs a string on stderr when ready
# Redirect it to stdout so we can access it from the coproc's stdout fd
# exec so that the coprocess is inotifywait itself, making the kill below work correctly
# See below why we listen to both open and delete_self events
exec inotifywait --format='%e %w' --event open,delete_self --monitor "$@" 2>&1
# This will trigger when this subshell exits, no matter if successful or not
# After exiting the subshell, the parent shell will continue executing
trap 'kill "${watcher_PID}"' exit

# Synchronously wait until inotifywait is ready
while read -r -u "${watcher[0]}" line && [[ "$line" != "Watches established." ]]; do

# Call the function that should not read the given paths and delete them afterwards

# Get the first event
read -r -u "${watcher[0]}" event file
# With funName potentially reading files first before deleting them,
# there are only these two possible event timelines:
# - open*, ..., open*, delete_self, ..., delete_self: If some excluded paths were read
# - delete_self, ..., delete_self: If no excluded paths were read
# So by looking at the first event we can figure out which one it is!
# This also means we don't have to wait to collect all events.

die "$funName opened excluded file $file when it shouldn't have"

die "During $funName, unexpected event type '$event' on file $file that should be excluded"
# Create the tree structure declared in the tree variable, usage:
#
#   [a/b] =  # Declare that file a/b should exist
#   [c/a] =  # Declare that file c/a should exist
#   [c/d/]=  # Declare that directory c/d/ should exist

# Track which paths need to be created
local -a dirsToCreate=()
local -a filesToCreate=()
for p in "${!tree[@]}"; do
    # If keys end with a `/` we treat them as directories, otherwise files
    if [[ "$p" =~ /$ ]]; then
        dirsToCreate+=("$p")
    else
        filesToCreate+=("$p")
# Create all the necessary paths.
# This is done with only a fixed number of processes,
# in order to not be too slow
# Though this does mean we're a bit limited with how many files can be created
if (( ${#dirsToCreate[@]} != 0 )); then
    mkdir -p "${dirsToCreate[@]}"
fi
if (( ${#filesToCreate[@]} != 0 )); then
    readarray -d '' -t parentsToCreate < <(dirname -z "${filesToCreate[@]}")
    mkdir -p "${parentsToCreate[@]}"
    touch "${filesToCreate[@]}"
# Check whether a file set includes/excludes declared paths as expected, usage:
#
#   [a/b] =1  # Declare that file a/b should exist and expect it to be included in the store path
#   [c/a] =   # Declare that file c/a should exist and expect it to be excluded in the store path
#   [c/d/]=   # Declare that directory c/d/ should exist and expect it to be excluded in the store path
#
#   checkFileset './a'  # Pass the fileset as the argument
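# A call site therefore looks roughly like this (illustrative only; the exact
# declaration form may differ slightly from the real test cases below):
#   declare -A tree=(
#       [a/b]=1
#       [c/a]=
#       [c/d/]=
#   )
#   checkFileset './a'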
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
local -a included=()
local -a includedFiles=()
local -a excluded=()
local -a excludedFiles=()
for p in "${!tree[@]}"; do
    case "${tree[$p]}" in
        # If keys end with a `/` we treat them as directories, otherwise files
        if [[ ! "$p" =~ /$ ]]; then
            includedFiles+=("$p")

        if [[ ! "$p" =~ /$ ]]; then
            excludedFiles+=("$p")

        die "Unsupported tree value: ${tree[$p]}"

# Test that lib.fileset.toList contains exactly the included files.
# The /#/./ part prefixes each element with `./`
expectEqual "toList ($fileset)" "sort lessThan [ ${includedFiles[*]/#/./} ]"

expression="toSource { root = ./.; fileset = $fileset; }"
# We don't have lambdas in bash unfortunately,
# so we just define a function instead and then pass its name
# shellcheck disable=SC2317

# Call toSource with the fileset, triggering open events for all files that are added to the store
expectStorePath "$expression"
if (( ${#excludedFiles[@]} != 0 )); then
    rm "${excludedFiles[@]}"

# Runs the function while checking that the given excluded files aren't read
storePath=$(withFileMonitor run "${excludedFiles[@]}")

# For each path that should be included, make sure it does occur in the resulting store path
for p in "${included[@]}"; do
    if [[ ! -e "$storePath/$p" ]]; then
        die "$expression doesn't include path $p when it should have"

# For each path that should be excluded, make sure it doesn't occur in the resulting store path
for p in "${excluded[@]}"; do
    if [[ -e "$storePath/$p" ]]; then
        die "$expression included path $p when it shouldn't have"

#### Error messages ####

# We're using [[:blank:]] here instead of \s, because only the former is POSIX
# (see https://pubs.opengroup.org/onlinepubs/007908799/xbd/re.html#tag_007_003_005).
# And indeed, Darwin's bash only supports the former

# Absolute paths in strings cannot be passed as `root`
expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
[[:blank:]]*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'

expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
[[:blank:]]*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
[[:blank:]]*Note that this only works for sources created from paths.'

# Only paths are accepted as `root`
expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'

# Different filesystem roots in root and fileset are not supported
mkdir -p {foo,bar}/mock-root
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
  toSource { root = ./foo/mock-root; fileset = ./bar/mock-root; }
' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \('"$work"'/foo/mock-root\):
[[:blank:]]*`root`: Filesystem root is "'"$work"'/foo/mock-root"
[[:blank:]]*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
[[:blank:]]*Different filesystem roots are not supported.'

# `root` needs to exist
expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a path that does not exist.'
# `root` needs to be a directory
expectFailure 'toSource { root = ./a; fileset = ./a; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a file, but it should be a directory instead. Potential solutions:
[[:blank:]]*- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
[[:blank:]]*- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as '"$work"', and set `fileset` to the file path.'
# The fileset argument should be evaluated, even if the directory is empty
expectFailure 'toSource { root = ./.; fileset = abort "This should be evaluated"; }' 'evaluation aborted with the following error message: '\''This should be evaluated'\'

# Only paths under `root` should be able to influence the result
expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
[[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
[[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'

# non-regular and non-symlink files cannot be added to the Nix store
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` contains a file that cannot be added to the store: '"$work"'/a
[[:blank:]]*This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
[[:blank:]]*Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.'

# Path coercion only works for paths
expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
[[:blank:]]*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
[[:blank:]]*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
[[:blank:]]*Note that this only works for sources created from paths.'

# Path coercion errors for non-existent paths
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.
[[:blank:]]*To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.'

# File sets cannot be evaluated directly
expectFailure 'union ./. ./.' 'lib.fileset: Directly evaluating a file set is not supported.
[[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
[[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'
expectFailure '_emptyWithoutBase' 'lib.fileset: Directly evaluating a file set is not supported.
[[:blank:]]*To turn it into a usable source, use `lib.fileset.toSource`.
[[:blank:]]*To pretty-print the contents, use `lib.fileset.trace` or `lib.fileset.traceVal`.'

# Past versions of the internal representation are supported
expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 0; _internalBase = ./.; }' \
    '{ _internalBase = ./.; _internalBaseComponents = path.subpath.components (path.splitRoot ./.).subpath; _internalBaseRoot = /.; _internalIsEmptyWithoutBase = false; _internalVersion = 3; _type = "fileset"; }'
expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 1; }' \
    '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'
expectEqual '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 2; }' \
    '{ _type = "fileset"; _internalIsEmptyWithoutBase = false; _internalVersion = 3; }'

# Future versions of the internal representation are unsupported
expectFailure '_coerce "<tests>: value" { _type = "fileset"; _internalVersion = 4; }' '<tests>: value is a file set created from a future version of the file set library with a different internal representation:
[[:blank:]]*- Internal version of the file set: 4
[[:blank:]]*- Internal version of the library: 3
[[:blank:]]*Make sure to update your Nixpkgs to have a newer version of `lib.fileset`.'
# _create followed by _coerce should give the inputs back without any validation
expectEqual '{
  inherit (_coerce "<test>" (_create ./. "directory"))
    _internalVersion _internalBase _internalTree;
}' '{ _internalBase = ./.; _internalTree = "directory"; _internalVersion = 3; }'

#### Resulting store path ####

# The store path name should be "source"
expectEqual 'toSource { root = ./.; fileset = ./.; }' 'sources.cleanSourceWith { name = "source"; src = ./.; }'

# We should be able to import an empty directory and end up with an empty result

# The empty value without a base should also result in an empty result
checkFileset '_emptyWithoutBase'

# Directories recursively containing no files are not included

# Check trees that could cause a naïve string prefix checking implementation to fail
checkFileset './ab/xy'

# Check path coercion examples in ../../doc/functions/fileset.section.md
# Test the source filter for the somewhat special case of files in the filesystem root.
# We can't easily test this with the above functions because we can't write to the filesystem root
# and we don't want to make any assumptions about which files are there in the sandbox.
expectEqual '_toSourceFilter (_create /. null) "/foo" ""' 'false'
expectEqual '_toSourceFilter (_create /. { foo = "regular"; }) "/foo" ""' 'true'
expectEqual '_toSourceFilter (_create /. { foo = null; }) "/foo" ""' 'false'

## lib.fileset.toList
# This function is mainly tested in checkFileset

# The error context for an invalid argument must be correct
expectFailure 'toList null' 'lib.fileset.toList: Argument is of type null, but it should be a file set or a path instead.'

# Works for the empty fileset
expectEqual 'toList _emptyWithoutBase' '[ ]'

# Works on empty paths
expectEqual 'toList ./.' '[ ]'

## lib.fileset.union, lib.fileset.unions
# Different filesystem roots in root and fileset are not supported
mkdir -p {foo,bar}/mock-root
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
  toSource { root = ./.; fileset = union ./foo/mock-root ./bar/mock-root; }
' 'lib.fileset.union: Filesystem roots are not the same:
[[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
[[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
[[:blank:]]*Different filesystem roots are not supported.'

expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
  toSource { root = ./.; fileset = unions [ ./foo/mock-root ./bar/mock-root ]; }
' 'lib.fileset.unions: Filesystem roots are not the same:
[[:blank:]]*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
[[:blank:]]*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
[[:blank:]]*Different filesystem roots are not supported.'

# Coercion errors show the correct context
expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: First argument \('"$work"'/a\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: Second argument \('"$work"'/b\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: Element 0 \('"$work"'/a\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: Element 1 \('"$work"'/b\) is a path that does not exist.'

# unions needs a list
expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Argument is of type null, but it should be a list instead.'

# The tree of later arguments should not be evaluated if a former argument already includes all files
checkFileset 'union ./. (_create ./. (abort "This should not be used!"))'
checkFileset 'unions [ ./. (_create ./. (abort "This should not be used!")) ]'
# unions doesn't include any files for an empty list or only empty values without a base
checkFileset 'unions [ ]'
checkFileset 'unions [ _emptyWithoutBase ]'
checkFileset 'unions [ _emptyWithoutBase _emptyWithoutBase ]'
checkFileset 'union _emptyWithoutBase _emptyWithoutBase'

# The empty value without a base is the left and right identity of union
checkFileset 'union ./x _emptyWithoutBase'
checkFileset 'union _emptyWithoutBase ./x'

# union doesn't include files that weren't specified
checkFileset 'union ./x ./y'
checkFileset 'unions [ ./x ./y ]'

# Also for directories
checkFileset 'union ./x ./y'
checkFileset 'unions [ ./x ./y ]'

# And for very specific paths
checkFileset 'union ./x/a ./y/b'
checkFileset 'unions [ ./x/a ./y/b ]'
# unions or chained unions can include more paths
checkFileset 'unions [ ./x/a ./x/b ./y/a ./z/b ]'
checkFileset 'union (union ./x/a ./x/b) (union ./y/a ./z/b)'
checkFileset 'union (union (union ./x/a ./x/b) ./y/a) ./z/b'
# unions should not stack overflow, even if many elements are passed
for i in $(seq 1000); do
# This is actually really hard to test:
# A lot of files would be needed to cause a stack overflow.
# And while we could limit the maximum stack size using `ulimit -s`,
# that turns out to not be very deterministic: https://github.com/NixOS/nixpkgs/pull/256417#discussion_r1339396686.
# Meanwhile, the test infra here is not the fastest; creating 10000 files would be too slow.
# So, just using 1000 files for now.
checkFileset 'unions (mapAttrsToList (name: _: ./. + "/${name}/a") (builtins.readDir ./.))'

## lib.fileset.intersection

# Different filesystem roots in root and fileset are not supported
mkdir -p {foo,bar}/mock-root
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
  toSource { root = ./.; fileset = intersection ./foo/mock-root ./bar/mock-root; }
' 'lib.fileset.intersection: Filesystem roots are not the same:
[[:blank:]]*First argument: Filesystem root is "'"$work"'/foo/mock-root"
[[:blank:]]*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
[[:blank:]]*Different filesystem roots are not supported.'

# Coercion errors show the correct context
expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: First argument \('"$work"'/a\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: Second argument \('"$work"'/b\) is a path that does not exist.'

# The tree of later arguments should not be evaluated if a former argument already excludes all files
checkFileset 'intersection _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
# We don't have any combinators that can explicitly remove files yet, so we need to rely on internal functions to test this for now
checkFileset 'intersection (_create ./. { a = null; }) (_create ./. { a = abort "This should not be used!"; })'

# If either side is empty, the result is empty
checkFileset 'intersection _emptyWithoutBase _emptyWithoutBase'
checkFileset 'intersection _emptyWithoutBase (_create ./. null)'
checkFileset 'intersection (_create ./. null) _emptyWithoutBase'
checkFileset 'intersection (_create ./. null) (_create ./. null)'

# If the intersection base paths are not overlapping, the result is empty and has no base path
expectEqual 'toSource { root = ./c; fileset = intersection ./a ./b; }' 'toSource { root = ./c; fileset = _emptyWithoutBase; }'

# If the intersection exists, the resulting base path is the longest of them
expectEqual 'toSource { root = ./a; fileset = intersection ./a ./.; }' 'toSource { root = ./a; fileset = ./a; }'
expectEqual 'toSource { root = ./a; fileset = intersection ./. ./a; }' 'toSource { root = ./a; fileset = ./a; }'
# Also finds the intersection with null'd filesetTrees
checkFileset 'intersection (_create ./. { a = "regular"; b = "regular"; c = null; }) (_create ./. { a = null; b = "regular"; c = "regular"; })'
# Actually computes the intersection between files
checkFileset 'intersection (unions [ ./a ./b ./c ./d ]) (unions [ ./c ./d ./e ./f ])'

checkFileset 'intersection ./b ./.'
checkFileset 'intersection ./b (unions [ ./a/x ./a/y ./b/x ./b/y ./c/x ./c/y ])'

checkFileset 'intersection (unions [ ./a/b ./c/d ./c/e ]) (unions [ ./a ./c/d/f ./c/e ])'

# Subtracting something from itself results in nothing
checkFileset 'difference ./. ./.'

# The tree of the second argument should not be evaluated if not needed
checkFileset 'difference _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
checkFileset 'difference (_create ./. null) (_create ./. (abort "This should not be used!"))'

# Subtracting nothing gives the same thing back
checkFileset 'difference ./. _emptyWithoutBase'
checkFileset 'difference ./. (_create ./. null)'

# Subtracting doesn't influence the base path
expectEqual 'toSource { root = ./a; fileset = difference ./a ./b; }' 'toSource { root = ./a; fileset = ./a; }'

# Also not the other way around
expectFailure 'toSource { root = ./a; fileset = difference ./. ./a; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
[[:blank:]]*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
[[:blank:]]*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
# Difference actually works
# We test all combinations of ./., ./a, ./a/x and ./b
checkFileset 'difference ./. ./.'
checkFileset 'difference ./a ./.'
checkFileset 'difference ./a/x ./.'
checkFileset 'difference ./b ./.'
checkFileset 'difference ./a ./a'
checkFileset 'difference ./a/x ./a'
checkFileset 'difference ./a/x ./a/x'
checkFileset 'difference ./b ./b'

checkFileset 'difference ./. ./a'

checkFileset 'difference ./a ./b'

checkFileset 'difference ./a/x ./b'

checkFileset 'difference ./a ./a/x'

checkFileset 'difference ./b ./a'
checkFileset 'difference ./b ./a/x'

checkFileset 'difference ./. ./a/x'

checkFileset 'difference ./. ./b'
# The first argument needs to be a function
expectFailure 'fileFilter null (abort "this is not needed")' 'lib.fileset.fileFilter: First argument is of type null, but it should be a function instead.'

# The second argument needs to be an existing path
expectFailure 'fileFilter (file: abort "this is not needed") _emptyWithoutBase' 'lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
[[:blank:]]*If you need to filter files in a file set, use `intersection fileset \(fileFilter pred \./\.\)` instead.'
expectFailure 'fileFilter (file: abort "this is not needed") null' 'lib.fileset.fileFilter: Second argument is of type null, but it should be a path instead.'
expectFailure 'fileFilter (file: abort "this is not needed") ./a' 'lib.fileset.fileFilter: Second argument \('"$work"'/a\) is a path that does not exist.'
# The predicate is not called when there are no files
checkFileset 'fileFilter (file: abort "this is not needed") ./.'
# The predicate must be able to handle extra attributes
expectFailure 'toSource { root = ./.; fileset = fileFilter ({ name, type, hasExt }: true) ./.; }' 'called with unexpected argument '\''"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you'\''re using `\{ name, file, hasExt \}:`, use `\{ name, file, hasExt, ... \}:` instead."'\'

# .name is the name, and it works correctly, even recursively
checkFileset 'fileFilter (file: file.name == "a") ./.'
checkFileset 'fileFilter (file: file.name != "a") ./.'
# `.type` is the file type
expectEqual \
    'toSource { root = ./.; fileset = fileFilter (file: file.type == "regular") ./.; }' \
    'toSource { root = ./.; fileset = ./d/a; }'
expectEqual \
    'toSource { root = ./.; fileset = fileFilter (file: file.type == "symlink") ./.; }' \
    'toSource { root = ./.; fileset = ./d/b; }'
expectEqual \
    'toSource { root = ./.; fileset = fileFilter (file: file.type == "unknown") ./.; }' \
    'toSource { root = ./.; fileset = ./d/c; }'
expectEqual \
    'toSource { root = ./.; fileset = fileFilter (file: file.type != "regular") ./.; }' \
    'toSource { root = ./.; fileset = union ./d/b ./d/c; }'
expectEqual \
    'toSource { root = ./.; fileset = fileFilter (file: file.type != "symlink") ./.; }' \
    'toSource { root = ./.; fileset = union ./d/a ./d/c; }'
expectEqual \
    'toSource { root = ./.; fileset = fileFilter (file: file.type != "unknown") ./.; }' \
    'toSource { root = ./.; fileset = union ./d/a ./d/b; }'
# Check that .hasExt checks for the file extension
# The empty extension is the same as a file ending with a .
checkFileset 'fileFilter (file: file.hasExt "") ./.'

# It can check for the last extension
checkFileset 'fileFilter (file: file.hasExt "a") ./.'

# It can check for any extension
checkFileset 'fileFilter (file:
# Note that union evaluates the first argument first if necessary, which is why we can use ./c/a here
checkFileset 'union ./c/a (fileFilter (file: assert file.name != "a"; true) ./.)'
# but here we need to use ./c
checkFileset 'union (fileFilter (file: assert file.name != "a"; true) ./.) ./c'
# Make sure single files are filtered correctly
checkFileset 'fileFilter (file: assert file.name == "a"; true) ./a'
checkFileset 'fileFilter (file: assert file.name == "a"; false) ./a'

# The second trace argument is returned
expectEqual 'trace ./. "some value"' 'builtins.trace "(empty)" "some value"'

# The fileset traceVal argument is returned
expectEqual 'traceVal ./.' 'builtins.trace "(empty)" (_create ./. "directory")'

# The tracing happens before the final argument is needed
expectEqual 'trace ./.' 'builtins.trace "(empty)" (x: x)'

# Tracing an empty directory shows it as such
expectTrace './.' '(empty)'
# This also works if there are directories, as long as they all recursively contain no files
expectTrace './.' '(empty)'
# The empty file set without a base also prints as empty
expectTrace '_emptyWithoutBase' '(empty)'
expectTrace 'unions [ ]' '(empty)'
expectTrace 'intersection ./foo ./bar' '(empty)'

# If a directory is fully included, print it as such
expectTrace './.' "$work"' (all files in directory)'

# If a directory is not fully included, recurse
touch a/{x,y} b/{x,y}
expectTrace 'union ./a/x ./b' "$work"'
- b (all files in directory)'

# If an included path is a file, print its type
expectTrace 'unions [ ./a ./b ./c ]' "$work"'

# Do not print directories without any files recursively
expectTrace 'unions [ ./a ./b ]' "$work"'

# If all children are either fully included or empty directories,
# the parent should be printed as fully included
expectTrace 'union ./a ./b' "$work"' (all files in directory)'

# If all children are either fully excluded or empty directories,
# the parent should be shown (or rather not shown) as fully excluded
expectTrace 'unions [ ./a ./x/b ./x/c ]' "$work"'

# Completely filtered out directories also print as empty
expectTrace '_create ./. {}' '(empty)'
# A general test to make sure the resulting format makes sense,
# such as indentation and ordering
mkdir -p bar/{qux,someDir}
touch bar/{baz,qux,someDir/a} foo
expectTrace 'unions [
- someDir (all files in directory)

# For recursively included directories,
# `(all files in directory)` should only be used if there's at least one file (otherwise it would be `(empty)`),
# and this should be determined without doing a full search
# a is intentionally ordered first here in order to allow triggering the short-circuit behavior
# We then check that b is not read
# In a more realistic scenario, some directories might need to be recursed into,
# but a file would be quickly found to trigger the short-circuit.
# We don't have lambdas in bash unfortunately,
# so we just define a function instead and then pass its name
# shellcheck disable=SC2317
# This shouldn't read b/
expectTrace './.' "$work"' (all files in directory)'
# Remove all files immediately after, triggering delete_self events for all of them

# Runs the function while checking that b isn't read
withFileMonitor run b

# Partially included directories trace entries as they are evaluated
expectTrace '_create ./. { a = null; b = "regular"; c = throw "b"; }' "$work"'
# Except entries that need to be evaluated to even figure out if it's only partially included:
# Here the directory could be fully excluded or included just from seeing a and b,
# so c needs to be evaluated before anything can be traced
expectTrace '_create ./. { a = null; b = null; c = throw "c"; }' ''
expectTrace '_create ./. { a = "regular"; b = "regular"; c = throw "c"; }' ''

# We can trace large directories (10000 here) without any problems
filesToCreate=({0..9}{0..9}{0..9}{0..9})
expectedTrace=$work$'\n'$(printf -- '- %s (regular)\n' "${filesToCreate[@]}")
# We need an excluded file so it doesn't print as `(all files in directory)`
touch 0 "${filesToCreate[@]}"
expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"

## lib.fileset.fromSource

# Check error messages

# String-like values are not supported
expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
[[:blank:]]*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'

expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'

# fromSource on non-existent paths gives an error
expectFailure 'fromSource ./a' 'lib.fileset.fromSource: The source origin \('"$work"'/a\) of the argument is a path that does not exist.'

# fromSource on a path works and is the same as coercing that path
expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
# Check that converting to a file set doesn't read the included files
expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
withFileMonitor run a/b

# Check that converting to a file set doesn't read entries for directories that are filtered out
expectEqual "trace (fromSource (lib.cleanSourceWith {
    filter = pathString: type: false;
})) null" "builtins.trace \"(empty)\" null"
withFileMonitor run a/b

# The filter is not needed on empty directories
expectEqual 'trace (fromSource (lib.cleanSourceWith {
    filter = abort "filter should not be needed";
})) null' 'trace _emptyWithoutBase null'

# Single files also work
expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
# For a tree assigning each subpath true/false,
# check whether a source filter with those results includes the same files
# as a file set created using fromSource. Usage:
#
#   [a]=1   # ./a is a file and the filter should return true for it
#   [b/]=0  # ./b is a directory and the filter should return false for it
# Serialise the tree as JSON (there are only minimal savings with jq,
# and we don't need to handle escapes)
for p in "${!tree[@]}"; do
    if [[ -z "$first" ]]; then
    case "${tree[$p]}" in
        die "Unsupported tree value: ${tree[$p]}"
} > "$tmp/tree.json"
# An expression to create a source value with a filter matching the tree
tree = importJSON '"$tmp"'/tree.json;
stripped = removePrefix (toString ./. + "/") pathString;
key = stripped + optionalString (type == "directory") "/";
(throw "tree key ${key} missing");

fileset = fromSource ('"$sourceExpr"');

# Turn both into store paths
sourceStorePath=$(expectStorePath "$sourceExpr")
filesetStorePath=$(expectStorePath "$filesetExpr")
# Loop through each path in the tree
while IFS= read -r -d $'\0' subpath; do
    if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
        # If it's not in the source store path, it's also not in the file set store path
        if [[ -e "$filesetStorePath"/"$subpath" ]]; then
            die "The store path $sourceStorePath created by $expr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
        fi
    elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
        # If it's an empty directory in the source store path, it shouldn't be in the file set store path
        if [[ -e "$filesetStorePath"/"$subpath" ]]; then
            die "The store path $sourceStorePath created by $expr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
        fi
    else
        # If it's a non-empty directory or a file, it should be in the file set store path
        if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
            die "The store path $sourceStorePath created by $expr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
        fi
    fi
done < <(find . -mindepth 1 -print0)
# Check whether the filter is evaluated correctly

# We fill out the above tree values with all possible combinations of 0 and 1
# Then check whether a filter based on those return values gets turned into the corresponding file set
for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
    for p in "${!tree[@]}"; do
        tree[$p]=$(( i % 2 ))
        (( i /= 2 )) || true
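        # Worked example (assuming the tree has 3 keys): for i = 5 the keys get the values
        # 1, 0, 1 in iteration order, since 5 % 2 = 1 (i becomes 2), 2 % 2 = 0 (i becomes 1),
        # and 1 % 2 = 1 (i becomes 0), i.e. the binary digits of i from least significant up.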
# The filter is called with the same arguments in the same order
trace (fromSource (cleanSourceWith {
    filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
builtins.seq (cleanSourceWith {
    filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
builtins.trace "'"$work"' (all files in directory)"

# Test that if a directory is not included, the filter isn't called on its contents
expectEqual 'trace (fromSource (cleanSourceWith {
    filter = pathString: type:
        if pathString == toString ./a then
        else if pathString == toString ./b then
        else if pathString == toString ./b/d then
            abort "This filter should not be called with path ${pathString}";
})) null' 'trace (_create ./. { b = "directory"; }) null'
# The filter is called lazily:
# if a later operation, say an intersection, removes a part of the tree, the filter won't run on it
expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
    filter = pathString: type:
        if pathString == toString ./a || pathString == toString ./a/b then
        else if pathString == toString ./a/c then
            abort "filter should not be called on ${pathString}";
}))) null' 'trace ./a/b null'

## lib.fileset.gitTracked/gitTrackedWith

# The first/second argument has to be a path
expectFailure 'gitTracked null' 'lib.fileset.gitTracked: Expected the argument to be a path, but it'\''s a null instead.'
expectFailure 'gitTrackedWith {} null' 'lib.fileset.gitTrackedWith: Expected the second argument to be a path, but it'\''s a null instead.'

# The path must be a directory
expectFailure 'gitTracked ./a' 'lib.fileset.gitTracked: Expected the argument \('"$work"'/a\) to be a directory, but it'\''s a file instead'
expectFailure 'gitTrackedWith {} ./a' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'/a\) to be a directory, but it'\''s a file instead'

# The path has to contain a .git directory
expectFailure 'gitTracked ./.' 'lib.fileset.gitTracked: Expected the argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
expectFailure 'gitTrackedWith {} ./.' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'

# recurseSubmodules has to be a boolean
expectFailure 'gitTrackedWith { recurseSubmodules = null; } ./.' 'lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it'\''s a null instead.'
# recurseSubmodules = true is not supported on all Nix versions
if [[ "$(nix-instantiate --eval --expr "$prefixExpression (versionAtLeast builtins.nixVersion _fetchGitSubmodulesMinver)")" == true ]]; then
    fetchGitSupportsSubmodules=1
else
    fetchGitSupportsSubmodules=
    expectFailure 'gitTrackedWith { recurseSubmodules = true; } ./.' 'lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version 2.4 and after, but Nix version [0-9.]+ is used.'
# Checks that `gitTrackedWith` contains the same files as `git ls-files`
# for the current working directory.
# If --recurse-submodules is passed, the flag is passed through to `git ls-files`
# and as `recurseSubmodules` to `gitTrackedWith`
checkGitTrackedWith() {
    if [[ "${1:-}" == "--recurse-submodules" ]]; then
        gitLsFlags="--recurse-submodules"
        gitTrackedArg="{ recurseSubmodules = true; }"
    # All files listed by `git ls-files`
    local -a expectedFiles=()
    while IFS= read -r -d $'\0' file; do
        # If there are submodules but --recurse-submodules isn't passed,
        # `git ls-files` lists them as empty directories,
        # so we need to filter that out since we only want to check/count files
        if [[ -f "$file" ]]; then
            expectedFiles+=("$file")
        fi
    done < <(git ls-files -z $gitLsFlags)
    storePath=$(expectStorePath 'toSource { root = ./.; fileset = gitTrackedWith '"$gitTrackedArg"' ./.; }')

    # Check that each expected file is also in the store path with the same content
    for expectedFile in "${expectedFiles[@]}"; do
        if [[ ! -e "$storePath"/"$expectedFile" ]]; then
            die "Expected file $expectedFile to exist in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
        fi
        if ! diff "$expectedFile" "$storePath"/"$expectedFile"; then
            die "Expected file $expectedFile to have the same contents as in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
        fi
    done

    # This is a cheap way to verify the inverse: that all files in the store path are also expected
    # We just count the number of files in both and verify they're the same
    actualFileCount=$(find "$storePath" -type f -printf . | wc -c)
    if [[ "${#expectedFiles[@]}" != "$actualFileCount" ]]; then
        die "Expected ${#expectedFiles[@]} files in $storePath, but got $actualFileCount.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
# Runs checkGitTrackedWith with and without --recurse-submodules
# Allows testing both variants together
if [[ -n "$fetchGitSupportsSubmodules" ]]; then
    checkGitTrackedWith --recurse-submodules

# Only repo-local config
git -C "$1" config user.name "Nixpkgs"
git -C "$1" config user.email "nixpkgs@nixos.org"
# Get at least a HEAD commit, needed for older Nix versions
git -C "$1" commit -q --allow-empty -m "Empty commit"
# Check that gitTracked[With] works as expected when evaluated out-of-tree

## First we create a git repository (and a subrepository) with `default.nix` files referring to their local paths,
## simulating how it would be used in the wild
echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTracked ./.; }' > default.nix

## We can evaluate it locally just fine; `fetchGit` is used underneath to filter git-tracked files
expectEqual '(import ./. { fs = lib.fileset; }).outPath' '(builtins.fetchGit ./.).outPath'
## We can also evaluate when importing from fetched store paths
storePath=$(expectStorePath 'builtins.fetchGit ./.')
expectEqual '(import '"$storePath"' { fs = lib.fileset; }).outPath' \""$storePath"\"

## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*\) is a store path within a working tree of a Git repository.
[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'

## Even with submodules
if [[ -n "$fetchGitSupportsSubmodules" ]]; then
    ## Both the main repo with the submodule
    echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTrackedWith { recurseSubmodules = true; } ./.; }' > default.nix
    git submodule add ./sub sub >/dev/null
    ## But also the submodule itself
    echo '{ fs }: fs.toSource { root = ./.; fileset = fs.gitTracked ./.; }' > sub/default.nix
    ## We can evaluate it locally just fine; `fetchGit` is used underneath to filter git-tracked files
    expectEqual '(import ./. { fs = lib.fileset; }).outPath' '(builtins.fetchGit { url = ./.; submodules = true; }).outPath'
    expectEqual '(import ./sub { fs = lib.fileset; }).outPath' '(builtins.fetchGit ./sub).outPath'
    ## We can also evaluate when importing from fetched store paths
    storePathWithSub=$(expectStorePath 'builtins.fetchGit { url = ./.; submodules = true; }')
    expectEqual '(import '"$storePathWithSub"' { fs = lib.fileset; }).outPath' \""$storePathWithSub"\"
    storePathSub=$(expectStorePath 'builtins.fetchGit ./sub')
    expectEqual '(import '"$storePathSub"' { fs = lib.fileset; }).outPath' \""$storePathSub"\"

    ## But it fails if the path is imported with a fetcher that doesn't remove .git (like just using "${./.}")
    expectFailure 'import "${./.}" { fs = lib.fileset; }' 'lib.fileset.gitTrackedWith: The second argument \(.*\) is a store path within a working tree of a Git repository.
[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
    expectFailure 'import "${./.}/sub" { fs = lib.fileset; }' 'lib.fileset.gitTracked: The argument \(.*/sub\) is a store path within a working tree of a Git repository.
[[:blank:]]*This indicates that a source directory was imported into the store using a method such as `import "\$\{./.\}"` or `path:.`.
[[:blank:]]*This function currently does not support such a use case, since it currently relies on `builtins.fetchGit`.
[[:blank:]]*You could make this work by using a fetcher such as `fetchGit` instead of copying the whole repository.
[[:blank:]]*If you can'\''t avoid copying the repo to the store, see https://github.com/NixOS/nix/issues/9292.'
# shallow = true is not supported on all Nix versions
# and older versions don't support shallow clones at all
if [[ "$(nix-instantiate --eval --expr "$prefixExpression (versionAtLeast builtins.nixVersion _fetchGitShallowMinver)")" == true ]]; then
    # Extra commit such that there's a commit that won't be in the shallow clone
    git -C full commit --allow-empty -q -m extra
    git clone -q --depth 1 "file://${PWD}/full" shallow

# Go through all stages of Git files
# See https://www.git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository

git commit -q -m "Added a"

# Add it regardless (needs -f)

# Untracked submodule
git -C sub commit -q --allow-empty -m "Empty commit"

git submodule add ./sub sub >/dev/null

git -C sub commit -q -m "Add a"

git -C sub rm -f -q a

## lib.fileset.maybeMissing

# Argument must be a path
expectFailure 'maybeMissing "someString"' 'lib.fileset.maybeMissing: Argument \("someString"\) is a string-like value, but it should be a path instead.'
expectFailure 'maybeMissing null' 'lib.fileset.maybeMissing: Argument is of type null, but it should be a path instead.'

checkFileset 'maybeMissing ./a'
checkFileset 'maybeMissing ./b'
checkFileset 'maybeMissing ./b/c'

# Works on single files
checkFileset 'maybeMissing ./a'
checkFileset 'maybeMissing ./b/c'

# Works on directories
checkFileset 'maybeMissing ./b'

# TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets