4 // Copyright (C) 2005 Novell, Inc.
8 // Permission is hereby granted, free of charge, to any person obtaining a copy
9 // of this software and associated documentation files (the "Software"), to deal
10 // in the Software without restriction, including without limitation the rights
11 // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 // copies of the Software, and to permit persons to whom the Software is
13 // furnished to do so, subject to the following conditions:
15 // The above copyright notice and this permission notice shall be included in all
16 // copies or substantial portions of the Software.
18 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
23 // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
using System;
using System.Collections;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Xml.Serialization;

using Lucene.Net.Documents;
using Lucene.Net.Index;
using LNS = Lucene.Net.Search;

using Beagle.Util;

using FSQ = Beagle.Daemon.FileSystemQueryable.FileSystemQueryable;
using Stopwatch = Beagle.Util.Stopwatch;
46 namespace Beagle
.Daemon
50 static string [] argv
;
52 static bool arg_recursive
= false, arg_delete
= false, arg_debug
= false, arg_cache_text
= false, arg_disable_filtering
= false, arg_disable_restart
= false, arg_disable_directories
= false;
54 static Hashtable remap_table
= new Hashtable ();
56 static string arg_output
, arg_tag
, arg_source
;
58 /////////////////////////////////////////////////////////
60 // Files and directories that are allowed to be in the target
61 // directory before we blow it away. If we encounter any file
62 // or dir not in this list, we'll bail out.
63 static string [] allowed_files
= {
64 "FileAttributesStore.db",
69 static string [] allowed_dirs
= {
76 /////////////////////////////////////////////////////////
78 static FileAttributesStore_Sqlite backing_fa_store
;
79 static FileAttributesStore fa_store
;
81 static LuceneIndexingDriver driver
;
83 static bool crawling
= true, indexing
= true, shutdown
= false, restart
= false;
85 static ArrayList allowed_patterns
= new ArrayList ();
86 static ArrayList denied_patterns
= new ArrayList ();
88 static Queue pending_files
= new Queue ();
89 static Queue pending_directories
= new Queue ();
91 const int BATCH_SIZE
= 30;
93 /////////////////////////////////////////////////////////
95 static void Main (string [] args
)
99 } catch (Exception ex
) {
100 Logger
.Log
.Error (ex
, "Unhandled exception thrown. Exiting immediately.");
101 Environment
.Exit (1);
105 static void DoMain (string [] args
)
107 SystemInformation
.SetProcessName ("beagle-build-index");
113 while (i
< args
.Length
) {
115 string arg
= args
[i
];
117 string next_arg
= i
< args
.Length
? args
[i
] : null;
126 if (next_arg
!= null)
133 arg_recursive
= true;
136 case "--enable-deletion":
140 case "--disable-directories":
141 arg_disable_directories
= true;
144 case "--enable-text-cache":
145 arg_cache_text
= true;
150 if (next_arg == null)
153 int j = next_arg.IndexOf (":");
156 Logger.Log.Error ("Invalid remap argument: {0}", next_arg);
157 Environment.Exit (1);
160 remap_table [next_arg.Substring (0, j)] = next_arg.Substring (j+1);
166 if (next_arg
!= null)
167 arg_output
= Path
.IsPathRooted (next_arg
) ? next_arg
: Path
.GetFullPath (next_arg
);
171 case "--disable-filtering":
172 arg_disable_filtering
= true;
175 case "--allow-pattern":
176 if (next_arg
== null)
179 if (next_arg
.IndexOf (',') != -1) {
180 foreach (string pattern
in next_arg
.Split (','))
181 allowed_patterns
.Add (new ExcludeItem (ExcludeType
.Pattern
, pattern
));
184 allowed_patterns
.Add (new ExcludeItem (ExcludeType
.Pattern
, next_arg
));
190 case "--deny-pattern":
191 if (next_arg
== null)
194 if (next_arg
.IndexOf (',') != -1) {
195 foreach (string pattern
in next_arg
.Split (','))
196 denied_patterns
.Add (new ExcludeItem (ExcludeType
.Pattern
, pattern
));
199 denied_patterns
.Add (new ExcludeItem (ExcludeType
.Pattern
, next_arg
));
205 case "--disable-restart":
206 arg_disable_restart
= true;
210 if (next_arg
== null)
213 arg_source
= next_arg
;
218 string path
= Path
.IsPathRooted (arg
) ? arg
: Path
.GetFullPath (arg
);
219 if (path
!= "/" && path
.EndsWith ("/"))
220 path
= path
.TrimEnd ('/');
222 if (Directory
.Exists (path
))
223 pending_directories
.Enqueue (new DirectoryInfo (path
));
224 else if (File
.Exists (path
))
225 pending_files
.Enqueue (new FileInfo (path
));
232 /////////////////////////////////////////////////////////
234 if (arg_output
== null) {
235 Logger
.Log
.Error ("--target must be specified");
236 Environment
.Exit (1);
239 foreach (FileSystemInfo info
in pending_directories
) {
240 if (Path
.GetFullPath (arg_output
) == info
.FullName
) {
241 Logger
.Log
.Error ("Target directory cannot be one of the source paths.");
242 Environment
.Exit (1);
246 foreach (FileSystemInfo info
in pending_files
) {
247 if (Path
.GetFullPath (arg_output
) == info
.FullName
) {
248 Logger
.Log
.Error ("Target directory cannot be one of the source paths.");
249 Environment
.Exit (1);
253 if (!Directory
.Exists (Path
.GetDirectoryName (arg_output
))) {
254 Logger
.Log
.Error ("Index directory not available for construction: {0}", arg_output
);
255 Environment
.Exit (1);
258 // Be *EXTRA PARANOID* about the contents of the target
259 // directory, because creating an indexing driver will
261 if (Directory
.Exists (arg_output
)) {
263 foreach (FileInfo info
in DirectoryWalker
.GetFileInfos (arg_output
)) {
264 if (Array
.IndexOf (allowed_files
, info
.Name
) == -1) {
265 Logger
.Log
.Error ("{0} doesn't look safe to delete: non-Beagle file {1} was found", arg_output
, info
.FullName
);
266 Environment
.Exit (1);
270 foreach (DirectoryInfo info
in DirectoryWalker
.GetDirectoryInfos (arg_output
)) {
271 if (Array
.IndexOf (allowed_dirs
, info
.Name
) == -1) {
272 Logger
.Log
.Error ("{0} doesn't look safe to delete: non-Beagle directory {1} was found", arg_output
, info
.FullName
);
273 Environment
.Exit (1);
278 // Set the IO priority so we don't slow down the system
279 if (! IoPriority
.SetIdle ())
280 IoPriority
.SetIoPriority (7);
282 driver
= new LuceneIndexingDriver (arg_output
, false);
283 driver
.TextCache
= (arg_cache_text
) ? new TextCache (arg_output
) : null;
284 if (driver
.TextCache
!= null)
285 driver
.TextCache
.WorldReadable
= true;
287 backing_fa_store
= new FileAttributesStore_Sqlite (driver
.TopDirectory
, driver
.Fingerprint
);
288 fa_store
= new FileAttributesStore (backing_fa_store
);
290 // Set up signal handlers
291 SetupSignalHandlers ();
293 Thread crawl_thread
, index_thread
, monitor_thread
= null;
295 Stopwatch watch
= new Stopwatch ();
298 // Start the thread that does the crawling
299 crawl_thread
= ExceptionHandlingThread
.Start (new ThreadStart (CrawlWorker
));
301 // Start the thread that does the actual indexing
302 index_thread
= ExceptionHandlingThread
.Start (new ThreadStart (IndexWorker
));
304 if (!arg_disable_restart
) {
305 // Start the thread that monitors memory usage.
306 monitor_thread
= ExceptionHandlingThread
.Start (new ThreadStart (MemoryMonitorWorker
));
309 // Join all the threads so that we know that we're the only thread still running
310 crawl_thread
.Join ();
311 index_thread
.Join ();
312 if (monitor_thread
!= null)
313 monitor_thread
.Join ();
316 Logger
.Log
.Debug ("Elapsed time {0}.", watch
);
319 Logger
.Log
.Debug ("Restarting helper");
320 Process p
= new Process ();
321 p
.StartInfo
.UseShellExecute
= false;
322 // FIXME: Maybe this isn't the right way to do things? It should be ok,
323 // the PATH is inherited from the shell script which runs mono itself.
324 p
.StartInfo
.FileName
= "mono";
325 p
.StartInfo
.Arguments
= String
.Join (" ", Environment
.GetCommandLineArgs ());
330 /////////////////////////////////////////////////////////////////
332 static void CrawlWorker ()
334 Logger
.Log
.Debug ("Starting CrawlWorker");
340 while (pending_directories
.Count
> 0) {
341 DirectoryInfo dir
= (DirectoryInfo
) pending_directories
.Dequeue ();
343 if (! arg_disable_directories
)
344 pending_files
.Enqueue (dir
);
348 foreach (DirectoryInfo subdir
in DirectoryWalker
.GetDirectoryInfos (dir
))
350 && !FileSystem
.IsSpecialFile (subdir
.FullName
))
351 pending_directories
.Enqueue (subdir
);
353 foreach (FileInfo file
in DirectoryWalker
.GetFileInfos (dir
))
355 && !FileSystem
.IsSpecialFile (file
.FullName
)) {
356 pending_files
.Enqueue (file
);
360 } catch (DirectoryNotFoundException e
) {}
368 Logger
.Log
.Debug ("Scanned {0} files and directories in {1} directories", count_dirs
+ count_files
, count_dirs
);
370 Logger
.Log
.Debug ("CrawlWorker Done");
376 /////////////////////////////////////////////////////////////////
378 static void AddToRequest (IndexerRequest request
, Indexable indexable
)
380 // Disable filtering and only index file attributes
381 if (arg_disable_filtering
)
382 indexable
.Filtering
= IndexableFiltering
.Never
;
384 // Tag the item for easy identification (for say, removal)
386 indexable
.AddProperty (Property
.NewUnsearched("Tag", arg_tag
));
388 if (arg_source
== null) {
389 DirectoryInfo dir
= new DirectoryInfo (StringFu
.SanitizePath (arg_output
));
390 arg_source
= dir
.Name
;
393 indexable
.Source
= arg_source
;
395 request
.Add (indexable
);
398 static IndexerReceipt
[] FlushIndexer (IIndexer indexer
, IndexerRequest request
)
400 IndexerReceipt
[] receipts
;
401 receipts
= indexer
.Flush (request
);
403 ArrayList pending_children
;
404 pending_children
= new ArrayList ();
406 foreach (IndexerReceipt raw_r
in receipts
) {
408 if (raw_r
is IndexerAddedReceipt
) {
409 // Update the file attributes
410 IndexerAddedReceipt r
= (IndexerAddedReceipt
) raw_r
;
412 Indexable indexable
= request
.GetByUri (r
.Uri
);
414 // We don't need to write out any file attributes for
416 if (indexable
.ParentUri
!= null)
419 string path
= r
.Uri
.LocalPath
;
422 attr
= fa_store
.ReadOrCreate (path
);
424 attr
.LastWriteTime
= indexable
.Timestamp
;
425 attr
.FilterName
= r
.FilterName
;
426 attr
.FilterVersion
= r
.FilterVersion
;
428 fa_store
.Write (attr
);
430 } else if (raw_r
is IndexerRemovedReceipt
) {
431 // Update the file attributes
432 IndexerRemovedReceipt r
= (IndexerRemovedReceipt
) raw_r
;
434 Indexable indexable
= request
.GetByUri (r
.Uri
);
436 string path
= r
.Uri
.LocalPath
;
437 Logger
.Log
.Debug ("Removing: '{0}'", path
);
438 fa_store
.Drop (path
);
440 } else if (raw_r
is IndexerChildIndexablesReceipt
) {
441 // Add any child indexables back into our indexer
442 IndexerChildIndexablesReceipt r
= (IndexerChildIndexablesReceipt
) raw_r
;
443 pending_children
.AddRange (r
.Children
);
447 request
.Clear (); // clear out the old request
448 foreach (Indexable i
in pending_children
) // and then add the children
449 AddToRequest (request
, i
);
454 static Indexable
FileToIndexable (FileInfo file
)
456 if (!file
.Exists
|| Ignore (file
) || fa_store
.IsUpToDate (file
.FullName
))
459 // Create the indexable and add the standard properties we
460 // use in the FileSystemQueryable.
461 Uri uri
= UriFu
.PathToFileUri (file
.FullName
);
462 Indexable indexable
= new Indexable (uri
);
463 indexable
.Timestamp
= file
.LastWriteTimeUtc
;
464 FSQ
.AddStandardPropertiesToIndexable (indexable
, file
.Name
, Guid
.Empty
, false);
466 // Store directory name in the index
467 string dirname
= file
.DirectoryName
;
468 indexable
.AddProperty (Property
.NewUnsearched (ParentDirUriPropKey
, UriFu
.PathToFileUri (dirname
)));
473 static Indexable
DirectoryToIndexable (DirectoryInfo dir
, Queue modified_directories
)
478 // Check if the directory information is stored in attributes store
479 // And if the mtime of the directory is same as that in the attributes store
480 FileAttributes attr
= fa_store
.Read (dir
.FullName
);
482 // If the directory exists in the fa store, then it is already indexed
484 if (arg_delete
&& dir
.LastWriteTimeUtc
> attr
.LastWriteTime
)
485 modified_directories
.Enqueue (dir
);
489 // Create the indexable and add the standard properties we
490 // use in the FileSystemQueryable.
491 Uri uri
= UriFu
.PathToFileUri (dir
.FullName
);
492 Indexable indexable
= new Indexable (uri
);
493 indexable
.MimeType
= "inode/directory";
494 indexable
.NoContent
= true;
495 indexable
.Timestamp
= dir
.LastWriteTimeUtc
;
496 FSQ
.AddStandardPropertiesToIndexable (indexable
, dir
.Name
, Guid
.Empty
, false);
498 // Add directory name property
499 string dirname
= dir
.Parent
.FullName
;
500 indexable
.AddProperty (Property
.NewUnsearched (ParentDirUriPropKey
, UriFu
.PathToFileUri (dirname
)));
502 indexable
.AddProperty (Property
.NewBool (IsDirectoryPropKey
, true));
507 static void IndexWorker ()
509 Logger
.Log
.Debug ("Starting IndexWorker");
510 Queue modified_directories
= new Queue ();
514 IndexerRequest pending_request
;
515 pending_request
= new IndexerRequest ();
518 if (pending_files
.Count
> 0) {
519 Object file_or_dir_info
= pending_files
.Dequeue ();
521 if (file_or_dir_info
is DirectoryInfo
)
522 indexable
= DirectoryToIndexable ((DirectoryInfo
) file_or_dir_info
, modified_directories
);
524 indexable
= FileToIndexable ((FileInfo
) file_or_dir_info
);
526 if (indexable
== null)
529 AddToRequest (pending_request
, indexable
);
531 if (pending_request
.Count
>= BATCH_SIZE
) {
532 Logger
.Log
.Debug ("Flushing driver, {0} items in queue", pending_request
.Count
);
533 FlushIndexer (driver
, pending_request
);
534 // FlushIndexer clears the pending_request
537 } else if (crawling
) {
538 //Logger.Log.Debug ("IndexWorker: La la la...");
545 // Time to remove deleted directories from the index and attributes store
546 while (modified_directories
.Count
> 0) {
547 DirectoryInfo subdir
= (DirectoryInfo
) modified_directories
.Dequeue ();
548 Logger
.Log
.Debug ("Checking {0} for deleted files and directories", subdir
.FullName
);
550 // Get a list of all documents from lucene index with ParentDirUriPropKey set as that of subdir
551 ICollection all_dirent
= GetAllItemsInDirectory (subdir
);
552 foreach (Dirent info
in all_dirent
) {
553 // check if the item exists
554 if (File
.Exists (info
.FullName
) ||
555 (info
.IsDirectory
&& Directory
.Exists (info
.FullName
)))
558 if (info
.IsDirectory
)
559 // Recursively remove deleted subdirectories
560 modified_directories
.Enqueue (new DirectoryInfo (info
.FullName
));
563 Uri uri
= UriFu
.PathToFileUri (info
.FullName
);
564 indexable
= new Indexable (IndexableType
.Remove
, uri
);
565 AddToRequest (pending_request
, indexable
);
569 // Call Flush until our request is empty. We have to do this in a loop
570 // because children can get added back to the pending request in a flush.
571 while (pending_request
.Count
> 0)
572 FlushIndexer (driver
, pending_request
);
574 backing_fa_store
.Flush ();
576 Logger
.Log
.Debug ("Optimizing index");
577 driver
.OptimizeNow ();
579 Logger
.Log
.Debug ("IndexWorker Done");
586 private bool is_directory
;
589 public Dirent (string path
, bool is_dir
)
592 this.is_directory
= is_dir
;
595 public bool IsDirectory
{
596 get { return is_directory; }
603 public string FullName
{
604 get { return path.Substring (7); }
608 private class BitArrayHitCollector
: LNS
.HitCollector
{
610 private BetterBitArray matches
;
612 public BitArrayHitCollector (BetterBitArray matches
)
614 this.matches
= matches
;
617 public override void Collect (int id
, float score
)
623 private const string ParentDirUriPropKey
= "beagle:ParentDirUri";
624 private const string IsDirectoryPropKey
= "beagle:IsDirectory";
626 // Returns a list of all files and directories in dir
627 static ICollection
GetAllItemsInDirectory (DirectoryInfo dir
)
630 string parent_uri_str
= UriFu
.PathToFileUri (dir
.FullName
).ToString ();
631 // Instead of taking the painfull way of using BeagleAnalyzer, lets just add the prefix manually
632 //parent_uri_str = "_:" + parent_uri_str;
633 // LuceneCommon thinks exposing secret property type encoding is bad, I think so too... except for now
634 string key
= "prop:k:" + ParentDirUriPropKey
;
635 //Logger.Log.Debug ("Querying for {0}={1}", parent_uri_str, key);
636 LNS
.Query query
= new LNS
.TermQuery (new Term (key
, parent_uri_str
));
639 LNS
.IndexSearcher searcher
;
640 searcher
= LuceneCommon
.GetSearcher (driver
.PrimaryStore
);
642 BetterBitArray matches
;
643 matches
= new BetterBitArray (searcher
.MaxDoc ());
645 BitArrayHitCollector collector
;
646 collector
= new BitArrayHitCollector (matches
);
648 searcher
.Search (query
, null, collector
);
650 // Finally we pull all of the matching documents,
651 // convert them to Dirent, and store them in a list.
653 ArrayList match_list
= new ArrayList ();
655 while (i
< matches
.Count
) {
657 i
= matches
.GetNextTrueIndex (i
);
658 if (i
>= matches
.Count
)
662 doc
= searcher
.Doc (i
);
665 info
= DocumentToDirent (doc
);
667 match_list
.Add (info
);
672 LuceneCommon
.ReleaseSearcher (searcher
);
673 //Logger.Log.Debug ("Found {0} items in {1}", match_list.Count, dir.FullName);
678 static private Dirent
DocumentToDirent (Document doc
)
683 path
= doc
.Get ("Uri");
685 string prop_key
= "prop:k:" + IsDirectoryPropKey
;
686 foreach (Field f
in doc
.Fields ()) {
687 if (f
.Name () != prop_key
)
690 is_dir
= (f
.StringValue ().Substring (2) == "true");
694 //Logger.Log.Debug ("Found: " + path + " (" + is_dir + ")");
695 return new Dirent (path
, is_dir
);
698 /////////////////////////////////////////////////////////////////
700 static void MemoryMonitorWorker ()
702 int vmrss_original
= SystemInformation
.VmRss
;
704 const double threshold
= 6.0;
707 while (! shutdown
&& (crawling
|| indexing
)) {
709 // Check resident memory usage
710 int vmrss
= SystemInformation
.VmRss
;
711 double size
= vmrss
/ (double) vmrss_original
;
712 if (vmrss
!= last_vmrss
)
713 Logger
.Log
.Debug ("Size: VmRSS={0:0.0} MB, size={1:0.00}, {2:0.0}%",
714 vmrss
/1024.0, size
, 100.0 * (size
- 1) / (threshold
- 1));
716 if (size
> threshold
) {
717 Logger
.Log
.Debug ("Process too big, shutting down!");
727 /////////////////////////////////////////////////////////////////
729 // From BeagleDaemon.cs
731 static void SetupSignalHandlers ()
733 // Force OurSignalHandler to be JITed
734 OurSignalHandler (-1);
736 // Set up our signal handler
737 Mono
.Unix
.Native
.Stdlib
.signal (Mono
.Unix
.Native
.Signum
.SIGINT
, OurSignalHandler
);
738 Mono
.Unix
.Native
.Stdlib
.signal (Mono
.Unix
.Native
.Signum
.SIGTERM
, OurSignalHandler
);
739 if (Environment
.GetEnvironmentVariable("BEAGLE_THERE_BE_NO_QUITTIN") == null)
740 Mono
.Unix
.Native
.Stdlib
.signal (Mono
.Unix
.Native
.Signum
.SIGQUIT
, OurSignalHandler
);
743 static void OurSignalHandler (int signal
)
745 // This allows us to call OurSignalHandler w/o doing anything.
746 // We want to call it once to ensure that it is pre-JITed.
750 Logger
.Log
.Debug ("Shutdown Requested");
754 /////////////////////////////////////////////////////////////////
756 static void PrintUsage ()
759 "beagle-build-index: Build an index.\n" +
760 "Web page: http://www.gnome.org/projects/beagle\n" +
761 "Copyright (C) 2005-2006 Novell, Inc.\n\n";
764 "Usage: beagle-build-index [OPTIONS] --target <index_path> <path> [path]\n\n" +
767 "beagle-build-index will *delete all existing data* within the target\n" +
768 "directory. Ensure that the target path is set correctly before running.\n\n" +
771 " --source [name]\t\tThe index's source name. Defaults to the target directory name\n" +
772 // FIXME: remap doesnt seem to be implemented !
773 // Implementing remap might some fixes to --enable-deletion, see IndexWorker
774 //" --remap [path1:path2]\t\tRemap data paths to fit target. \n" +
775 " --tag [tag]\t\t\tTag index data for identification.\n" +
776 " --recursive\t\t\tCrawl source path recursivly.\n" +
777 " --enable-deletion\t\tRemove deleted files and directories from index.\n" +
778 " --enable-text-cache\t\tBuild text-cache of documents used for snippets.\n" +
779 " --disable-directories\t\tDon't add directories to the index.\n" +
780 " --disable-filtering\t\tDisable all filtering of files. Only index attributes.\n" +
781 " --allow-pattern [pattern]\tOnly allow files that match the pattern to be indexed.\n" +
782 " --deny-pattern [pattern]\tKeep any files that match the pattern from being indexed.\n" +
783 " --disable-restart\t\tDon't restart when memory usage gets above a certain threshold.\n" +
784 " --debug\t\t\tEcho verbose debugging information.\n\n";
787 Console
.WriteLine (usage
);
788 Environment
.Exit (0);
791 /////////////////////////////////////////////////////////
793 static Uri
RemapUri (Uri uri
)
795 // FIXME: This is ghetto
796 foreach (DictionaryEntry dict
in remap_table
) {
797 if (uri
.LocalPath
.IndexOf ((string) dict
.Key
) == -1)
799 return new Uri (uri
.LocalPath
.Replace ((string) dict
.Key
, (string) dict
.Value
));
804 static bool Ignore (DirectoryInfo directory
)
806 if (directory
.Name
.StartsWith ("."))
812 static bool Ignore (FileInfo file
)
814 if (file
.Name
.StartsWith ("."))
817 if (FileSystem
.IsSpecialFile (file
.FullName
))
820 if (allowed_patterns
.Count
> 0) {
821 foreach (ExcludeItem pattern
in allowed_patterns
)
822 if (pattern
.IsMatch (file
.Name
))
828 foreach (ExcludeItem pattern
in denied_patterns
)
829 if (pattern
.IsMatch (file
.Name
))
832 // FIXME: Add more stuff here