//
// BuildIndex.cs
//
// Copyright (C) 2005 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
using System;
using System.Collections;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Threading;
using System.Xml;
using System.Xml.Serialization;

using Lucene.Net.Documents;
using Lucene.Net.Index;
using LNS = Lucene.Net.Search;

using Beagle;
using Beagle.Util;
using FSQ = Beagle.Daemon.FileSystemQueryable.FileSystemQueryable;
using Stopwatch = Beagle.Util.Stopwatch;
namespace Beagle.Daemon {

	class BuildIndex {

		static string [] argv;

		static bool arg_recursive = false, arg_delete = false, arg_debug = false, arg_cache_text = false, arg_disable_filtering = false, arg_disable_restart = false, arg_disable_directories = false;

		static Hashtable remap_table = new Hashtable ();

		static string arg_output, arg_tag, arg_source;

		/////////////////////////////////////////////////////////

		// Files and directories that are allowed to be in the target
		// directory before we blow it away. If we encounter any file
		// or dir not in this list, we'll bail out.
		static string [] allowed_files = {
			"FileAttributesStore.db",
			"fingerprint",
			"version"
		};

		static string [] allowed_dirs = {
			"Locks",
			"PrimaryIndex",
			"SecondaryIndex",
			"TextCache"
		};

		/////////////////////////////////////////////////////////

		static FileAttributesStore_Sqlite backing_fa_store;
		static FileAttributesStore fa_store;

		static LuceneIndexingDriver driver;

		static bool crawling = true, indexing = true, shutdown = false, restart = false;

		static ArrayList allowed_patterns = new ArrayList ();
		static ArrayList denied_patterns = new ArrayList ();

		static Queue pending_files = new Queue ();
		static Queue pending_directories = new Queue ();
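
		// Indexables are accumulated in an IndexerRequest and flushed to the
		// Lucene driver once this many are queued (see IndexWorker).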
		const int BATCH_SIZE = 30;

		/////////////////////////////////////////////////////////

		static void Main (string [] args)
		{
			try {
				DoMain (args);
			} catch (Exception ex) {
				Logger.Log.Error (ex, "Unhandled exception thrown. Exiting immediately.");
				Environment.Exit (1);
			}
		}

		static void DoMain (string [] args)
		{
			SystemInformation.SetProcessName ("beagle-build-index");

			if (args.Length < 2)
				PrintUsage ();
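
			// Walk the command line; anything that isn't a recognized option
			// is treated as a file or directory to be indexed.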
			int i = 0;
			while (i < args.Length) {

				string arg = args [i];
				++i;
				string next_arg = i < args.Length ? args [i] : null;

				switch (arg) {
				case "-h":
				case "--help":
					PrintUsage ();
					break;

				case "--tag":
					if (next_arg != null)
						arg_tag = next_arg;
					++i;
					break;

				case "-r":
				case "--recursive":
					arg_recursive = true;
					break;

				case "--enable-deletion":
					arg_delete = true;
					break;

				case "--disable-directories":
					arg_disable_directories = true;
					break;

				case "--enable-text-cache":
					arg_cache_text = true;
					break;

				case "--remap":
					if (next_arg == null)
						break;

					int j = next_arg.IndexOf (":");

					if (j == -1) {
						Logger.Log.Error ("Invalid remap argument: {0}", next_arg);
						Environment.Exit (1);
					}

					remap_table [next_arg.Substring (0, j)] = next_arg.Substring (j+1);

					++i;
					break;

				case "--target":
					if (next_arg != null)
						arg_output = Path.IsPathRooted (next_arg) ? next_arg : Path.GetFullPath (next_arg);
					++i;
					break;

				case "--disable-filtering":
					arg_disable_filtering = true;
					break;

				case "--allow-pattern":
					if (next_arg == null)
						break;

					if (next_arg.IndexOf (',') != -1) {
						foreach (string pattern in next_arg.Split (','))
							allowed_patterns.Add (new ExcludeItem (ExcludeType.Pattern, pattern));
					} else {
						allowed_patterns.Add (new ExcludeItem (ExcludeType.Pattern, next_arg));
					}

					++i;
					break;

				case "--deny-pattern":
					if (next_arg == null)
						break;

					if (next_arg.IndexOf (',') != -1) {
						foreach (string pattern in next_arg.Split (','))
							denied_patterns.Add (new ExcludeItem (ExcludeType.Pattern, pattern));
					} else {
						denied_patterns.Add (new ExcludeItem (ExcludeType.Pattern, next_arg));
					}

					++i;
					break;

				case "--disable-restart":
					arg_disable_restart = true;
					break;

				case "--source":
					if (next_arg == null)
						break;

					arg_source = next_arg;
					++i;
					break;

				default:
					string path = Path.IsPathRooted (arg) ? arg : Path.GetFullPath (arg);
					if (path != "/" && path.EndsWith ("/"))
						path = path.TrimEnd ('/');

					if (Directory.Exists (path))
						pending_directories.Enqueue (new DirectoryInfo (path));
					else if (File.Exists (path))
						pending_files.Enqueue (new FileInfo (path));
					break;
				}
			}

			argv = args;

			/////////////////////////////////////////////////////////

			if (arg_output == null) {
				Logger.Log.Error ("--target must be specified");
				Environment.Exit (1);
			}

			foreach (FileSystemInfo info in pending_directories) {
				if (Path.GetFullPath (arg_output) == info.FullName) {
					Logger.Log.Error ("Target directory cannot be one of the source paths.");
					Environment.Exit (1);
				}
			}

			foreach (FileSystemInfo info in pending_files) {
				if (Path.GetFullPath (arg_output) == info.FullName) {
					Logger.Log.Error ("Target directory cannot be one of the source paths.");
					Environment.Exit (1);
				}
			}

			if (!Directory.Exists (Path.GetDirectoryName (arg_output))) {
				Logger.Log.Error ("Index directory not available for construction: {0}", arg_output);
				Environment.Exit (1);
			}

			// Be *EXTRA PARANOID* about the contents of the target
			// directory, because creating an indexing driver will
			// nuke it.
			if (Directory.Exists (arg_output)) {

				foreach (FileInfo info in DirectoryWalker.GetFileInfos (arg_output)) {
					if (Array.IndexOf (allowed_files, info.Name) == -1) {
						Logger.Log.Error ("{0} doesn't look safe to delete: non-Beagle file {1} was found", arg_output, info.FullName);
						Environment.Exit (1);
					}
				}

				foreach (DirectoryInfo info in DirectoryWalker.GetDirectoryInfos (arg_output)) {
					if (Array.IndexOf (allowed_dirs, info.Name) == -1) {
						Logger.Log.Error ("{0} doesn't look safe to delete: non-Beagle directory {1} was found", arg_output, info.FullName);
						Environment.Exit (1);
					}
				}
			}

			// Set the IO priority so we don't slow down the system
			if (! IoPriority.SetIdle ())
				IoPriority.SetIoPriority (7);

			driver = new LuceneIndexingDriver (arg_output, false);
			driver.TextCache = (arg_cache_text) ? new TextCache (arg_output) : null;
			if (driver.TextCache != null)
				driver.TextCache.WorldReadable = true;
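
			// File attributes (mtime, filter name and version) live in a
			// SQLite-backed store; they let unchanged files be skipped on
			// subsequent runs (see FileToIndexable).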
			backing_fa_store = new FileAttributesStore_Sqlite (driver.TopDirectory, driver.Fingerprint);
			fa_store = new FileAttributesStore (backing_fa_store);

			// Set up signal handlers
			SetupSignalHandlers ();

			Thread crawl_thread, index_thread, monitor_thread = null;

			Stopwatch watch = new Stopwatch ();
			watch.Start ();

			// Start the thread that does the crawling
			crawl_thread = ExceptionHandlingThread.Start (new ThreadStart (CrawlWorker));

			// Start the thread that does the actual indexing
			index_thread = ExceptionHandlingThread.Start (new ThreadStart (IndexWorker));

			if (!arg_disable_restart) {
				// Start the thread that monitors memory usage.
				monitor_thread = ExceptionHandlingThread.Start (new ThreadStart (MemoryMonitorWorker));
			}

			// Join all the threads so that we know that we're the only thread still running
			crawl_thread.Join ();
			index_thread.Join ();
			if (monitor_thread != null)
				monitor_thread.Join ();

			watch.Stop ();
			Logger.Log.Debug ("Elapsed time {0}.", watch);

			if (restart) {
				Logger.Log.Debug ("Restarting helper");
				Process p = new Process ();
				p.StartInfo.UseShellExecute = false;
				// FIXME: Maybe this isn't the right way to do things? It should be ok,
				// the PATH is inherited from the shell script which runs mono itself.
				p.StartInfo.FileName = "mono";
				p.StartInfo.Arguments = String.Join (" ", Environment.GetCommandLineArgs ());
				p.Start ();
			}
		}

		/////////////////////////////////////////////////////////////////

		static void CrawlWorker ()
		{
			Logger.Log.Debug ("Starting CrawlWorker");

			try {
				int count_dirs = 0;
				int count_files = 0;
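
				// Breadth-first crawl: every directory dequeued here has its
				// files (and, with --recursive, its subdirectories) pushed onto
				// the queues that IndexWorker consumes.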
				while (pending_directories.Count > 0) {
					DirectoryInfo dir = (DirectoryInfo) pending_directories.Dequeue ();

					if (! arg_disable_directories)
						pending_files.Enqueue (dir);

					try {
						if (arg_recursive)
							foreach (DirectoryInfo subdir in DirectoryWalker.GetDirectoryInfos (dir))
								if (!Ignore (subdir)
								    && !FileSystem.IsSpecialFile (subdir.FullName))
									pending_directories.Enqueue (subdir);

						foreach (FileInfo file in DirectoryWalker.GetFileInfos (dir))
							if (!Ignore (file)
							    && !FileSystem.IsSpecialFile (file.FullName)) {
								pending_files.Enqueue (file);
								count_files ++;
							}

					} catch (DirectoryNotFoundException) { }

					if (shutdown)
						break;

					count_dirs++;
				}

				Logger.Log.Debug ("Scanned {0} files and directories in {1} directories", count_dirs + count_files, count_dirs);
			} finally {
				Logger.Log.Debug ("CrawlWorker Done");

				crawling = false;
			}
		}

		/////////////////////////////////////////////////////////////////

		static void AddToRequest (IndexerRequest request, Indexable indexable)
		{
			// Disable filtering and only index file attributes
			if (arg_disable_filtering)
				indexable.Filtering = IndexableFiltering.Never;

			// Tag the item for easy identification (for say, removal)
			if (arg_tag != null)
				indexable.AddProperty (Property.NewUnsearched ("Tag", arg_tag));

			if (arg_source == null) {
				DirectoryInfo dir = new DirectoryInfo (StringFu.SanitizePath (arg_output));
				arg_source = dir.Name;
			}

			indexable.Source = arg_source;

			request.Add (indexable);
		}
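
		// Flush the request to the indexer and process the receipts:
		// added items get their file attributes written out, removed items
		// are dropped from the attributes store, and any child indexables
		// (e.g. archive or attachment contents) are queued for the next flush.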
		static IndexerReceipt [] FlushIndexer (IIndexer indexer, IndexerRequest request)
		{
			IndexerReceipt [] receipts;
			receipts = indexer.Flush (request);

			ArrayList pending_children;
			pending_children = new ArrayList ();

			foreach (IndexerReceipt raw_r in receipts) {

				if (raw_r is IndexerAddedReceipt) {
					// Update the file attributes
					IndexerAddedReceipt r = (IndexerAddedReceipt) raw_r;

					Indexable indexable = request.GetByUri (r.Uri);

					// We don't need to write out any file attributes for
					// children.
					if (indexable.ParentUri != null)
						continue;

					string path = r.Uri.LocalPath;

					FileAttributes attr;
					attr = fa_store.ReadOrCreate (path);

					attr.LastWriteTime = indexable.Timestamp;
					attr.FilterName = r.FilterName;
					attr.FilterVersion = r.FilterVersion;

					fa_store.Write (attr);

				} else if (raw_r is IndexerRemovedReceipt) {
					// Update the file attributes
					IndexerRemovedReceipt r = (IndexerRemovedReceipt) raw_r;

					Indexable indexable = request.GetByUri (r.Uri);

					string path = r.Uri.LocalPath;
					Logger.Log.Debug ("Removing: '{0}'", path);
					fa_store.Drop (path);

				} else if (raw_r is IndexerChildIndexablesReceipt) {
					// Add any child indexables back into our indexer
					IndexerChildIndexablesReceipt r = (IndexerChildIndexablesReceipt) raw_r;
					pending_children.AddRange (r.Children);
				}
			}

			request.Clear (); // clear out the old request
			foreach (Indexable i in pending_children) // and then add the children
				AddToRequest (request, i);

			return receipts;
		}
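
		// Convert a file into an Indexable, or return null if the file is
		// ignored or its attributes say it is already up to date in the index.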
		static Indexable FileToIndexable (FileInfo file)
		{
			if (!file.Exists || Ignore (file) || fa_store.IsUpToDate (file.FullName))
				return null;

			// Create the indexable and add the standard properties we
			// use in the FileSystemQueryable.
			Uri uri = UriFu.PathToFileUri (file.FullName);
			Indexable indexable = new Indexable (uri);
			indexable.Timestamp = file.LastWriteTimeUtc;
			FSQ.AddStandardPropertiesToIndexable (indexable, file.Name, Guid.Empty, false);

			// Store directory name in the index
			string dirname = file.DirectoryName;
			indexable.AddProperty (Property.NewUnsearched (ParentDirUriPropKey, UriFu.PathToFileUri (dirname)));

			return indexable;
		}

		static Indexable DirectoryToIndexable (DirectoryInfo dir, Queue modified_directories)
		{
			if (!dir.Exists)
				return null;

			// Check if the directory information is stored in the attributes store
			// and if the mtime of the directory matches the stored one.
			FileAttributes attr = fa_store.Read (dir.FullName);

			// If the directory exists in the fa store, then it is already indexed.
			if (attr != null) {
				if (arg_delete && dir.LastWriteTimeUtc > attr.LastWriteTime)
					modified_directories.Enqueue (dir);
				return null;
			}

			// Create the indexable and add the standard properties we
			// use in the FileSystemQueryable.
			Uri uri = UriFu.PathToFileUri (dir.FullName);
			Indexable indexable = new Indexable (uri);
			indexable.MimeType = "inode/directory";
			indexable.NoContent = true;
			indexable.Timestamp = dir.LastWriteTimeUtc;
			FSQ.AddStandardPropertiesToIndexable (indexable, dir.Name, Guid.Empty, false);

			// Add directory name property
			string dirname = dir.Parent.FullName;
			indexable.AddProperty (Property.NewUnsearched (ParentDirUriPropKey, UriFu.PathToFileUri (dirname)));

			indexable.AddProperty (Property.NewBool (IsDirectoryPropKey, true));

			return indexable;
		}

		static void IndexWorker ()
		{
			Logger.Log.Debug ("Starting IndexWorker");
			Queue modified_directories = new Queue ();

			try {
				Indexable indexable;
				IndexerRequest pending_request;
				pending_request = new IndexerRequest ();
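
				// Drain the queue that CrawlWorker fills, batching items into
				// pending_request; stop once the crawl is done and the queue is empty.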
				while (!shutdown) {
					if (pending_files.Count > 0) {
						Object file_or_dir_info = pending_files.Dequeue ();

						if (file_or_dir_info is DirectoryInfo)
							indexable = DirectoryToIndexable ((DirectoryInfo) file_or_dir_info, modified_directories);
						else
							indexable = FileToIndexable ((FileInfo) file_or_dir_info);

						if (indexable == null)
							continue;

						AddToRequest (pending_request, indexable);

						if (pending_request.Count >= BATCH_SIZE) {
							Logger.Log.Debug ("Flushing driver, {0} items in queue", pending_request.Count);
							FlushIndexer (driver, pending_request);
							// FlushIndexer clears the pending_request
						}

					} else if (crawling) {
						//Logger.Log.Debug ("IndexWorker: La la la...");
						Thread.Sleep (50);
					} else {
						break;
					}
				}

				// Time to remove deleted directories from the index and attributes store
				while (modified_directories.Count > 0) {
					DirectoryInfo subdir = (DirectoryInfo) modified_directories.Dequeue ();
					Logger.Log.Debug ("Checking {0} for deleted files and directories", subdir.FullName);

					// Get a list of all documents from the lucene index with ParentDirUriPropKey set to subdir
					ICollection all_dirent = GetAllItemsInDirectory (subdir);
					foreach (Dirent info in all_dirent) {
						// Check if the item still exists
						if (File.Exists (info.FullName) ||
						    (info.IsDirectory && Directory.Exists (info.FullName)))
							continue;

						if (info.IsDirectory)
							// Recursively remove deleted subdirectories
							modified_directories.Enqueue (new DirectoryInfo (info.FullName));

						// Remove it from the index
						Uri uri = UriFu.PathToFileUri (info.FullName);
						indexable = new Indexable (IndexableType.Remove, uri);
						AddToRequest (pending_request, indexable);
					}
				}

				// Call Flush until our request is empty. We have to do this in a loop
				// because children can get added back to the pending request in a flush.
				while (pending_request.Count > 0)
					FlushIndexer (driver, pending_request);

				backing_fa_store.Flush ();

				Logger.Log.Debug ("Optimizing index");
				driver.OptimizeNow ();
			} finally {
				Logger.Log.Debug ("IndexWorker Done");

				indexing = false;
			}
		}

		class Dirent {
			private bool is_directory;
			private string path;

			public Dirent (string path, bool is_dir)
			{
				this.path = path;
				this.is_directory = is_dir;
			}

			public bool IsDirectory {
				get { return is_directory; }
			}

			public string Path {
				get { return path; }
			}
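
			// 'path' holds a file:// URI string; strip the 7-character
			// "file://" prefix to recover the local path.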
			public string FullName {
				get { return path.Substring (7); }
			}
		}

		private class BitArrayHitCollector : LNS.HitCollector {

			private BetterBitArray matches;

			public BitArrayHitCollector (BetterBitArray matches)
			{
				this.matches = matches;
			}

			public override void Collect (int id, float score)
			{
				matches [id] = true;
			}
		}

		private const string ParentDirUriPropKey = "beagle:ParentDirUri";
		private const string IsDirectoryPropKey = "beagle:IsDirectory";

		// Returns a list of all files and directories in dir
		static ICollection GetAllItemsInDirectory (DirectoryInfo dir)
		{
			// Form the query
			string parent_uri_str = UriFu.PathToFileUri (dir.FullName).ToString ();
			// Instead of taking the painful way of using BeagleAnalyzer, let's just add the prefix manually
			//parent_uri_str = "_:" + parent_uri_str;
			// LuceneCommon thinks exposing secret property type encoding is bad, I think so too... except for now
			string key = "prop:k:" + ParentDirUriPropKey;
			//Logger.Log.Debug ("Querying for {0}={1}", parent_uri_str, key);
			LNS.Query query = new LNS.TermQuery (new Term (key, parent_uri_str));

			// Do the search
			LNS.IndexSearcher searcher;
			searcher = LuceneCommon.GetSearcher (driver.PrimaryStore);

			BetterBitArray matches;
			matches = new BetterBitArray (searcher.MaxDoc ());

			BitArrayHitCollector collector;
			collector = new BitArrayHitCollector (matches);

			searcher.Search (query, null, collector);

			// Finally we pull all of the matching documents,
			// convert them to Dirent, and store them in a list.

			ArrayList match_list = new ArrayList ();
			int i = 0;
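
			// Walk the set bits of the BetterBitArray; each true index is the
			// id of a matching Lucene document.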
			while (i < matches.Count) {

				i = matches.GetNextTrueIndex (i);
				if (i >= matches.Count)
					break;

				Document doc;
				doc = searcher.Doc (i);

				Dirent info;
				info = DocumentToDirent (doc);

				match_list.Add (info);

				++i;
			}

			LuceneCommon.ReleaseSearcher (searcher);
			//Logger.Log.Debug ("Found {0} items in {1}", match_list.Count, dir.FullName);

			return match_list;
		}

		static private Dirent DocumentToDirent (Document doc)
		{
			string path;
			bool is_dir = false;

			path = doc.Get ("Uri");

			string prop_key = "prop:k:" + IsDirectoryPropKey;
			foreach (Field f in doc.Fields ()) {
				if (f.Name () != prop_key)
					continue;
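
				// Property values carry a two-character type-encoding prefix
				// (see the note in GetAllItemsInDirectory); strip it before comparing.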
				is_dir = (f.StringValue ().Substring (2) == "true");
				break;
			}

			//Logger.Log.Debug ("Found: " + path + " (" + is_dir + ")");
			return new Dirent (path, is_dir);
		}

		/////////////////////////////////////////////////////////////////

		static void MemoryMonitorWorker ()
		{
			int vmrss_original = SystemInformation.VmRss;
			const double threshold = 6.0;
			int last_vmrss = 0;
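
			// Poll resident memory every few seconds; once it grows past
			// 'threshold' times the starting size, request a shutdown and
			// let Main re-exec the process.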
			while (! shutdown && (crawling || indexing)) {

				// Check resident memory usage
				int vmrss = SystemInformation.VmRss;
				double size = vmrss / (double) vmrss_original;
				if (vmrss != last_vmrss)
					Logger.Log.Debug ("Size: VmRSS={0:0.0} MB, size={1:0.00}, {2:0.0}%",
							  vmrss/1024.0, size, 100.0 * (size - 1) / (threshold - 1));
				last_vmrss = vmrss;
				if (size > threshold) {
					Logger.Log.Debug ("Process too big, shutting down!");
					restart = true;
					shutdown = true;
					return;
				} else {
					Thread.Sleep (3000);
				}
			}
		}

		/////////////////////////////////////////////////////////////////

		// From BeagleDaemon.cs

		static void SetupSignalHandlers ()
		{
			// Force OurSignalHandler to be JITed
			OurSignalHandler (-1);

			// Set up our signal handler
			Mono.Unix.Native.Stdlib.signal (Mono.Unix.Native.Signum.SIGINT, OurSignalHandler);
			Mono.Unix.Native.Stdlib.signal (Mono.Unix.Native.Signum.SIGTERM, OurSignalHandler);
			if (Environment.GetEnvironmentVariable ("BEAGLE_THERE_BE_NO_QUITTIN") == null)
				Mono.Unix.Native.Stdlib.signal (Mono.Unix.Native.Signum.SIGQUIT, OurSignalHandler);
		}

		static void OurSignalHandler (int signal)
		{
			// This allows us to call OurSignalHandler w/o doing anything.
			// We want to call it once to ensure that it is pre-JITed.
			if (signal < 0)
				return;

			Logger.Log.Debug ("Shutdown Requested");
			shutdown = true;
		}

		/////////////////////////////////////////////////////////////////

		static void PrintUsage ()
		{
			string usage =
				"beagle-build-index: Build an index.\n" +
				"Web page: http://www.gnome.org/projects/beagle\n" +
				"Copyright (C) 2005-2006 Novell, Inc.\n\n";

			usage +=
				"Usage: beagle-build-index [OPTIONS] --target <index_path> <path> [path]\n\n" +
				"** WARNING **\n" +
				"beagle-build-index will *delete all existing data* within the target\n" +
				"directory. Ensure that the target path is set correctly before running.\n\n" +
				"Options:\n" +
				" --source [name]\t\tThe index's source name. Defaults to the target directory name.\n" +
				// FIXME: remap doesn't seem to be implemented!
				// Implementing remap might need some fixes to --enable-deletion, see IndexWorker
				//" --remap [path1:path2]\t\tRemap data paths to fit target.\n" +
				" --tag [tag]\t\t\tTag index data for identification.\n" +
				" --recursive\t\t\tCrawl source path recursively.\n" +
				" --enable-deletion\t\tRemove deleted files and directories from index.\n" +
				" --enable-text-cache\t\tBuild text-cache of documents used for snippets.\n" +
				" --disable-directories\t\tDon't add directories to the index.\n" +
				" --disable-filtering\t\tDisable all filtering of files. Only index attributes.\n" +
				" --allow-pattern [pattern]\tOnly allow files that match the pattern to be indexed.\n" +
				" --deny-pattern [pattern]\tKeep any files that match the pattern from being indexed.\n" +
				" --disable-restart\t\tDon't restart when memory usage gets above a certain threshold.\n" +
				" --debug\t\t\tEcho verbose debugging information.\n\n";

			Console.WriteLine (usage);
			Environment.Exit (0);
		}

		/////////////////////////////////////////////////////////
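
		// Rewrite a URI's path using the first matching remap-table entry.
		// (--remap is currently disabled; see the FIXME in PrintUsage.)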
		static Uri RemapUri (Uri uri)
		{
			// FIXME: This is ghetto
			foreach (DictionaryEntry dict in remap_table) {
				if (uri.LocalPath.IndexOf ((string) dict.Key) == -1)
					continue;
				return new Uri (uri.LocalPath.Replace ((string) dict.Key, (string) dict.Value));
			}
			return uri;
		}

		static bool Ignore (DirectoryInfo directory)
		{
			if (directory.Name.StartsWith ("."))
				return true;

			return false;
		}

		static bool Ignore (FileInfo file)
		{
			if (file.Name.StartsWith ("."))
				return true;

			if (FileSystem.IsSpecialFile (file.FullName))
				return true;
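
			// If any --allow-pattern was given, only files matching one of
			// those patterns are indexed; otherwise fall through to the deny list.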
			if (allowed_patterns.Count > 0) {
				foreach (ExcludeItem pattern in allowed_patterns)
					if (pattern.IsMatch (file.Name))
						return false;

				return true;
			}

			foreach (ExcludeItem pattern in denied_patterns)
				if (pattern.IsMatch (file.Name))
					return true;

			// FIXME: Add more stuff here

			return false;
		}
	}
}