1 //
2 // LuceneCommon.cs
3 //
4 // Copyright (C) 2004-2005 Novell, Inc.
5 //
7 //
8 // Permission is hereby granted, free of charge, to any person obtaining a
9 // copy of this software and associated documentation files (the "Software"),
10 // to deal in the Software without restriction, including without limitation
11 // the rights to use, copy, modify, merge, publish, distribute, sublicense,
12 // and/or sell copies of the Software, and to permit persons to whom the
13 // Software is furnished to do so, subject to the following conditions:
15 // The above copyright notice and this permission notice shall be included in
16 // all copies or substantial portions of the Software.
18 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
21 // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
23 // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
24 // DEALINGS IN THE SOFTWARE.
27 using System;
28 using System.Collections;
29 using System.Diagnostics;
30 using System.Globalization;
31 using System.IO;
32 using System.Text;
33 using System.Threading;
34 using System.Xml;
35 using System.Xml.Serialization;
37 using Lucene.Net.Analysis;
38 using Lucene.Net.Analysis.Standard;
39 using Lucene.Net.Documents;
40 using Lucene.Net.Index;
41 using Lucene.Net.QueryParsers;
42 using LNS = Lucene.Net.Search;
44 using Beagle.Util;
46 namespace Beagle.Daemon {
48 public class LuceneCommon {
50 public delegate bool HitFilter (Hit hit);
52 // VERSION HISTORY
53 // ---------------
55 // 1: Original
56 // 2: Changed format of timestamp strings
57 // 3: Schema changed to be more Dashboard-Match-like
58 // 4: Schema changed for files to include _Directory property
59 // 5: Changed analyzer to support stemming. Bumped version # to
60 // force everyone to re-index.
61 // 6: lots of schema changes as part of the general refactoring
62 // 7: incremented to force a re-index after our upgrade to lucene 1.4
63 // (in theory the file formats are compatible, but we are seeing 'term
64 // out of order' exceptions in some cases)
65 // 8: another forced re-index, this time because of massive changes
66 // in the file system backend (it would be nice to have per-backend
67 // versioning so that we didn't have to purge all indexes just
68 // because one changed)
69 // 9: changed the way properties are stored, changed in conjunction
70 // with sane handling of multiple properties on hits.
71 // 10: changed to support typed and mutable properties
72 // 11: moved mime type and hit type into properties
73 // 12: added year-month and year-month-day resolutions for all
74 // date properties
75 // 13: moved source into a property
76 // 14: allow wildcard queries to also match keywords
77 // 15: analyze PropertyKeyword field, and store all properties as
78 // lower case so that we're truly case insensitive.
79 // 16: add inverted timestamp to make querying substantially faster
80 private const int MAJOR_VERSION = 16;
81 private int minor_version = 0;
83 private string index_name;
84 private string top_dir;
86 private string fingerprint;
87 private int last_item_count = -1;
89 // This is the big index, containing document full-texts and
90 // data that is expensive to index.
91 private Lucene.Net.Store.Directory primary_store = null;
93 // This is the small index, containing document info that we
94 // expect to change. Canonical example: file names.
95 private Lucene.Net.Store.Directory secondary_store = null;
97 //////////////////////////////////////////////////////////////////////////////
99 protected LuceneCommon (string index_name, int minor_version)
101 this.index_name = index_name;
102 this.minor_version = minor_version;
104 this.top_dir = (Path.IsPathRooted (index_name)) ? index_name : Path.Combine (PathFinder.IndexDir, index_name);
107 //////////////////////////////////////////////////////////////////////////////
109 protected string IndexName { get { return index_name; } }
111 public Lucene.Net.Store.Directory PrimaryStore { get { return primary_store; } }
113 public Lucene.Net.Store.Directory SecondaryStore { get { return secondary_store; } }
115 public string Fingerprint { get { return fingerprint; } }
117 public string TopDirectory { get { return top_dir; } }
119 //////////////////////////////////////////////////////////////////////////////
121 protected TextCache text_cache = null;
123 public TextCache TextCache {
124 get { return text_cache; }
125 set { text_cache = value; }
128 //////////////////////////////////////////////////////////////////////////////
130 private string VersionFile {
131 get { return Path.Combine (top_dir, "version"); }
134 private string FingerprintFile {
135 get { return Path.Combine (top_dir, "fingerprint"); }
138 // Shouldn't really be public
139 public string PrimaryIndexDirectory {
140 get { return Path.Combine (top_dir, "PrimaryIndex"); }
143 // Shouldn't really be public
144 public string SecondaryIndexDirectory {
145 get { return Path.Combine (top_dir, "SecondaryIndex"); }
148 public string LockDirectory {
149 get { return Path.Combine (top_dir, "Locks"); }
152 //////////////////////////////////////////////////////////////////////////////
154 // Deal with dangling locks
156 private bool IsDanglingLock (FileInfo info)
158 Log.Debug ("Checking for dangling locks...");
160 // It isn't even a lock file
161 if (! info.Name.EndsWith (".lock"))
162 return false;
164 StreamReader reader;
165 string pid = null;
167 try {
168 reader = new StreamReader (info.FullName);
169 pid = reader.ReadLine ();
170 reader.Close ();
172 } catch {
173 // We couldn't read the lockfile, so it probably went away.
174 return false;
178 if (pid == null) {
179 // Looks like the lock file was empty, which really
180 // shouldn't happen. It should contain the PID of
181 // the process which locked it. Let's be on the safe
182 // side and assume it's a dangling lock.
183 Log.Warn ("Found an empty lock file, that shouldn't happen: {0}", info.FullName);
184 return true;
187 string cmdline_file;
188 cmdline_file = String.Format ("/proc/{0}/cmdline", pid);
190 string cmdline = "";
191 try {
192 reader = new StreamReader (cmdline_file);
193 cmdline = reader.ReadLine ();
194 reader.Close ();
195 } catch {
196 // If we can't open that file, either:
197 // (1) The process doesn't exist
198 // (2) It does exist, but it doesn't belong to us.
199 // Thus it isn't an IndexHelper
200 // In either case, the lock is dangling --- if it
201 // still exists.
202 return info.Exists;
205 // The process exists, but isn't an IndexHelper.
206 // If the lock file is still there, it is dangling.
207 // FIXME: During one run of bludgeon I got a null reference
208 // exception here, so I added the cmdline == null check.
209 // Why exactly would that happen? Is this logic correct
210 // in that (odd and presumably rare) case?
211 if (cmdline == null || cmdline.IndexOf ("IndexHelper.exe") == -1)
212 return info.Exists;
214 // If we reach this point, we know:
215 // (1) The process still exists
216 // (2) We own it
217 // (3) It is an IndexHelper process
218 // Thus it almost certainly isn't a dangling lock.
219 // The process might be wedged, but that is
220 // another issue...
221 return false;
224 protected bool Exists ()
226 if (! (Directory.Exists (top_dir)
227 && File.Exists (VersionFile)
228 && File.Exists (FingerprintFile)
229 && Directory.Exists (PrimaryIndexDirectory)
230 && IndexReader.IndexExists (PrimaryIndexDirectory)
231 && Directory.Exists (SecondaryIndexDirectory)
232 && IndexReader.IndexExists (SecondaryIndexDirectory)
233 && Directory.Exists (LockDirectory)))
234 return false;
236 // Check the index's version number. If it is wrong,
237 // declare the index non-existent.
239 StreamReader version_reader;
240 string version_str;
241 version_reader = new StreamReader (VersionFile);
242 version_str = version_reader.ReadLine ();
243 version_reader.Close ();
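// The version string is normally "MAJOR.MINOR" (e.g. "16.2"); a bare
// number (presumably from an older index format) is treated below as a
// minor version with a major version of 0.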
245 int current_major_version, current_minor_version;
246 int i = version_str.IndexOf ('.');
248 if (i != -1) {
249 current_major_version = Convert.ToInt32 (version_str.Substring (0, i));
250 current_minor_version = Convert.ToInt32 (version_str.Substring (i+1));
251 } else {
252 current_minor_version = Convert.ToInt32 (version_str);
253 current_major_version = 0;
256 if (current_major_version != MAJOR_VERSION
257 || (minor_version >= 0 && current_minor_version != minor_version)) {
258 Logger.Log.Debug ("Version mismatch in {0}", index_name);
259 Logger.Log.Debug ("Index has version {0}.{1}, expected {2}.{3}",
260 current_major_version, current_minor_version,
261 MAJOR_VERSION, minor_version);
262 return false;
265 // Check the lock directory: If there is a dangling write lock,
266 // assume that the index is corrupted and declare it non-existent.
267 DirectoryInfo lock_dir_info;
268 lock_dir_info = new DirectoryInfo (LockDirectory);
269 foreach (FileInfo info in lock_dir_info.GetFiles ()) {
270 if (IsDanglingLock (info)) {
271 Logger.Log.Warn ("Found a dangling index lock on {0}", info.FullName);
272 return false;
276 return true;
279 private Lucene.Net.Store.Directory CreateIndex (string path)
281 // Create a directory to put the index in.
282 Directory.CreateDirectory (path);
284 // Create a new store.
285 Lucene.Net.Store.Directory store;
286 store = Lucene.Net.Store.FSDirectory.GetDirectory (path, LockDirectory, true);
288 // Create an empty index in that store.
289 IndexWriter writer;
290 writer = new IndexWriter (store, null, true);
291 writer.Close ();
293 return store;
296 // Create will kill your index dead. Use it with care.
297 // You don't need to call Open after calling Create.
298 protected void Create ()
300 if (minor_version < 0)
301 minor_version = 0;
303 // Purge any existing directories.
304 if (Directory.Exists (top_dir)) {
305 Logger.Log.Debug ("Purging {0}", top_dir);
306 Directory.Delete (top_dir, true);
309 // Create any necessary directories.
310 Directory.CreateDirectory (top_dir);
311 Directory.CreateDirectory (LockDirectory);
313 // Create the indexes.
314 primary_store = CreateIndex (PrimaryIndexDirectory);
315 secondary_store = CreateIndex (SecondaryIndexDirectory);
317 // Generate and store the index fingerprint.
318 fingerprint = GuidFu.ToShortString (Guid.NewGuid ());
319 TextWriter writer;
320 writer = new StreamWriter (FingerprintFile, false);
321 writer.WriteLine (fingerprint);
322 writer.Close ();
324 // Store our index version information.
325 writer = new StreamWriter (VersionFile, false);
326 writer.WriteLine ("{0}.{1}", MAJOR_VERSION, minor_version);
327 writer.Close ();
330 protected void Open ()
332 Open (false);
335 protected void Open (bool read_only_mode)
337 // Read our index fingerprint.
338 TextReader reader;
339 reader = new StreamReader (FingerprintFile);
340 fingerprint = reader.ReadLine ();
341 reader.Close ();
343 // Create stores for our indexes.
344 primary_store = Lucene.Net.Store.FSDirectory.GetDirectory (PrimaryIndexDirectory, LockDirectory, false, read_only_mode);
345 secondary_store = Lucene.Net.Store.FSDirectory.GetDirectory (SecondaryIndexDirectory, LockDirectory, false, read_only_mode);
348 ////////////////////////////////////////////////////////////////
351 // Custom Analyzers
354 private class SingletonTokenStream : TokenStream {
356 private string singleton_str;
358 public SingletonTokenStream (string singleton_str)
360 this.singleton_str = singleton_str;
363 override public Lucene.Net.Analysis.Token Next ()
365 if (singleton_str == null)
366 return null;
368 Lucene.Net.Analysis.Token token;
369 token = new Lucene.Net.Analysis.Token (singleton_str, 0, singleton_str.Length);
371 singleton_str = null;
373 return token;
377 // FIXME: This assumes everything being indexed is in English!
378 private class BeagleAnalyzer : StandardAnalyzer {
380 private char [] buffer = new char [2];
381 private bool strip_extra_property_info = false;
383 public BeagleAnalyzer (bool strip_extra_property_info)
385 this.strip_extra_property_info = strip_extra_property_info;
388 public override TokenStream TokenStream (string fieldName, TextReader reader)
390 bool is_text_prop = false;
392 // Strip off the first two characters in a property.
393 // We store type information in those two characters, so we don't
394 // want to index them.
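// (The stored value has the form "s:..." or "_:...", as written by
// AddPropertyToDocument below, so only the part after the ':' actually
// gets indexed.)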
395 if (fieldName.StartsWith ("prop:")) {
397 if (strip_extra_property_info) {
398 // Skip everything up to and including the first :
399 int c;
400 do {
401 c = reader.Read ();
402 } while (c != -1 && c != ':');
405 is_text_prop = fieldName.StartsWith ("prop:t");
407 // If this is a non-text property, just return one token
408 // containing the entire string. We do this to avoid
409 // tokenizing keywords.
410 if (! is_text_prop) {
411 // We don't want to lower case the token if it's
412 // in the private namespace.
414 TokenStream singleton_stream = new SingletonTokenStream (reader.ReadToEnd ());
416 if (fieldName.StartsWith ("prop:k:" + LuceneQueryingDriver.PrivateNamespace))
417 return singleton_stream;
418 else
419 return new LowerCaseFilter (singleton_stream);
421 } else if (fieldName == "PropertyKeyword")
422 return new LowerCaseFilter (new SingletonTokenStream (reader.ReadToEnd ()));
424 TokenStream outstream;
425 outstream = base.TokenStream (fieldName, reader);
427 if (fieldName == "Text"
428 || fieldName == "HotText"
429 || fieldName == "PropertyText"
430 || is_text_prop) {
431 outstream = new NoiseEmailHostFilter (outstream);
432 outstream = new PorterStemFilter (outstream);
435 return outstream;
439 static private Analyzer indexing_analyzer = new BeagleAnalyzer (true);
440 static private Analyzer query_analyzer = new BeagleAnalyzer (false);
442 static protected Analyzer IndexingAnalyzer { get { return indexing_analyzer; } }
443 static protected Analyzer QueryAnalyzer { get { return query_analyzer; } }
445 ////////////////////////////////////////////////////////////////
448 // Dealing with properties
451 static private char TypeToCode (PropertyType type)
453 switch (type) {
454 case PropertyType.Text: return 't';
455 case PropertyType.Keyword: return 'k';
456 case PropertyType.Date: return 'd';
458 throw new Exception ("Bad property type: " + type);
461 static private PropertyType CodeToType (char c)
463 switch (c) {
464 case 't': return PropertyType.Text;
465 case 'k': return PropertyType.Keyword;
466 case 'd': return PropertyType.Date;
469 throw new Exception ("Bad property code: " + c);
472 static private string TypeToWildcardField (PropertyType type)
474 switch (type) {
475 case PropertyType.Text: return "PropertyText";
476 case PropertyType.Keyword: return "PropertyKeyword";
477 case PropertyType.Date: return "PropertyDate";
480 return null;
483 // Exposing this is a little bit suspicious.
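// For example, a text property with the key "dc:title" ends up in the
// Lucene field "prop:t:dc:title".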
484 static protected string PropertyToFieldName (PropertyType type, string key)
486 return String.Format ("prop:{0}:{1}", TypeToCode (type), key);
490 static private void AddDateFields (string field_name, Property prop, Document doc)
492 DateTime dt = StringFu.StringToDateTime (prop.Value);
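// Two extra fields are added so that date queries can work at coarser
// resolutions; e.g. 16 Oct 2006 presumably becomes "YM:" + field_name
// with value "200610" and "D:" + field_name with value "16", matching
// the terms built by NewYearMonthTerm / NewDayTerm below.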
494 Field f;
495 f = new Field ("YM:" + field_name,
496 StringFu.DateTimeToYearMonthString (dt),
497 false, // never store
498 true, // always index
499 false); // never tokenize
500 doc.Add (f);
502 f = new Field ("D:" + field_name,
503 StringFu.DateTimeToDayString (dt),
504 false, // never store
505 true, // always index
506 false); // never tokenize
507 doc.Add (f);
510 static protected void AddPropertyToDocument (Property prop, Document doc)
512 if (prop == null || prop.Value == null)
513 return;
515 // Don't actually put properties in the UnindexedNamespace
516 // in the document. A horrible (and yet lovely!) hack.
517 if (prop.Key.StartsWith (StringFu.UnindexedNamespace))
518 return;
520 Field f;
522 if (prop.IsSearched) {
523 string wildcard_field = TypeToWildcardField (prop.Type);
524 if (wildcard_field != null) {
525 f = new Field (wildcard_field,
526 prop.Value,
527 false, // never stored
528 true, // always indexed
529 true); // always tokenize (just lowercases for keywords; full analysis for text)
530 doc.Add (f);
532 if (prop.Type == PropertyType.Date)
533 AddDateFields (wildcard_field, prop, doc);
537 string coded_value;
538 coded_value = String.Format ("{0}:{1}",
539 prop.IsSearched ? 's' : '_',
540 prop.Value);
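// e.g. a searched property with value "jdoe" is stored as "s:jdoe",
// an unsearched one as "_:jdoe".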
542 string field_name = PropertyToFieldName (prop.Type, prop.Key);
544 f = new Field (field_name,
545 coded_value,
546 prop.IsStored,
547 true, // always index
548 true); // always tokenize (strips off type code for keywords and lowercases)
549 doc.Add (f);
551 if (prop.Type == PropertyType.Date)
552 AddDateFields (field_name, prop, doc);
555 static protected Property GetPropertyFromDocument (Field f, Document doc, bool from_primary_index)
557 // Note: we don't use the document that we pass in,
558 // but in theory we could. At some later point we
559 // might need to split a property's data across two or
560 // more fields in the document.
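// Field names have the form "prop:<type code>:<key>" (see
// PropertyToFieldName), so field_name [5] is the type code and the key
// starts at index 7; e.g. "prop:k:dc:author".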
562 if (f == null)
563 return null;
565 string field_name;
566 field_name = f.Name ();
567 if (field_name.Length < 7
568 || ! field_name.StartsWith ("prop:"))
569 return null;
571 string field_value;
572 field_value = f.StringValue ();
574 Property prop;
575 prop = new Property ();
576 prop.Type = CodeToType (field_name [5]);
577 prop.Key = field_name.Substring (7);
578 prop.Value = field_value.Substring (2);
579 prop.IsSearched = (field_value [0] == 's');
580 prop.IsMutable = ! from_primary_index;
581 prop.IsStored = f.IsStored ();
583 return prop;
586 //////////////////////////////////////////////////////////////////////////////
589 // Dealing with documents
592 static protected void BuildDocuments (Indexable indexable,
593 out Document primary_doc,
594 out Document secondary_doc)
596 primary_doc = new Document ();
597 secondary_doc = null;
599 Field f;
601 f = Field.Keyword ("Uri", UriFu.UriToEscapedString (indexable.Uri));
602 primary_doc.Add (f);
604 if (indexable.ParentUri != null) {
605 f = Field.Keyword ("ParentUri", UriFu.UriToEscapedString (indexable.ParentUri));
606 primary_doc.Add (f);
609 if (indexable.ValidTimestamp) {
610 // Note that we also want to search in the
611 // Timestamp field when we do a wildcard date
612 // query, so that's why we also add a wildcard
613 // field for each item here.
615 string wildcard_field = TypeToWildcardField (PropertyType.Date);
617 string str = StringFu.DateTimeToString (indexable.Timestamp);
618 f = Field.Keyword ("Timestamp", str);
619 primary_doc.Add (f);
620 f = Field.UnStored (wildcard_field, str);
621 primary_doc.Add (f);
623 // Create an inverted timestamp so that we can
624 // sort by timestamp at search-time.
625 long timeval = Convert.ToInt64 (str);
626 f = Field.UnStored ("InvertedTimestamp", (Int64.MaxValue - timeval).ToString ());
627 primary_doc.Add (f);
629 str = StringFu.DateTimeToYearMonthString (indexable.Timestamp);
630 f = Field.Keyword ("YM:Timestamp", str);
631 primary_doc.Add (f);
632 f = Field.UnStored ("YM:" + wildcard_field, str);
633 primary_doc.Add (f);
635 str = StringFu.DateTimeToDayString (indexable.Timestamp);
636 f = Field.Keyword ("D:Timestamp", str);
637 primary_doc.Add (f);
638 f = Field.UnStored ("D:" + wildcard_field, str);
639 primary_doc.Add (f);
642 if (indexable.NoContent) {
643 // If there is no content, make a note of that
644 // in a special property.
645 Property prop;
646 prop = Property.NewBool ("beagle:NoContent", true);
647 AddPropertyToDocument (prop, primary_doc);
649 } else {
651 // Since we might have content, add our text
652 // readers.
654 TextReader reader;
656 reader = indexable.GetTextReader ();
657 if (reader != null) {
658 f = Field.Text ("Text", reader);
659 primary_doc.Add (f);
662 reader = indexable.GetHotTextReader ();
663 if (reader != null) {
664 f = Field.Text ("HotText", reader);
665 primary_doc.Add (f);
669 // Store the Type and MimeType in special properties
671 if (indexable.HitType != null) {
672 Property prop;
673 prop = Property.NewUnsearched ("beagle:HitType", indexable.HitType);
674 AddPropertyToDocument (prop, primary_doc);
677 if (indexable.MimeType != null) {
678 Property prop;
679 prop = Property.NewUnsearched ("beagle:MimeType", indexable.MimeType);
680 AddPropertyToDocument (prop, primary_doc);
683 if (indexable.Source != null) {
684 Property prop;
685 prop = Property.NewUnsearched ("beagle:Source", indexable.Source);
686 AddPropertyToDocument (prop, primary_doc);
689 // Store the other properties
691 foreach (Property prop in indexable.Properties) {
692 Document target_doc = primary_doc;
693 if (prop.IsMutable) {
694 if (secondary_doc == null) {
695 secondary_doc = new Document ();
696 f = Field.Keyword ("Uri", UriFu.UriToEscapedString (indexable.Uri));
697 secondary_doc.Add (f);
699 target_doc = secondary_doc;
702 AddPropertyToDocument (prop, target_doc);
706 static protected Document RewriteDocument (Document old_secondary_doc,
707 Indexable prop_only_indexable)
709 Hashtable seen_props;
710 seen_props = new Hashtable ();
712 Document new_doc;
713 new_doc = new Document ();
715 Field uri_f;
716 uri_f = Field.Keyword ("Uri", UriFu.UriToEscapedString (prop_only_indexable.Uri));
717 new_doc.Add (uri_f);
719 Logger.Log.Debug ("Rewriting {0}", prop_only_indexable.DisplayUri);
721 // Add the new properties to the new document. To
722 // delete a property, set the Value to null... then it
723 // will be added to seen_props (so the old value will
724 // be ignored below), but AddPropertyToDocument will
725 // return w/o doing anything.
726 foreach (Property prop in prop_only_indexable.Properties) {
727 seen_props [prop.Key] = prop;
728 AddPropertyToDocument (prop, new_doc);
729 Logger.Log.Debug ("New prop '{0}' = '{1}'", prop.Key, prop.Value);
732 // Copy the other properties from the old document to the
733 // new one, skipping any properties that we got new values
734 // for out of the Indexable.
735 if (old_secondary_doc != null) {
736 foreach (Field f in old_secondary_doc.Fields ()) {
737 Property prop;
738 prop = GetPropertyFromDocument (f, old_secondary_doc, false);
739 if (prop != null && ! seen_props.Contains (prop.Key)) {
740 Logger.Log.Debug ("Old prop '{0}' = '{1}'", prop.Key, prop.Value);
741 AddPropertyToDocument (prop, new_doc);
746 return new_doc;
749 static protected Uri GetUriFromDocument (Document doc)
751 string uri;
752 uri = doc.Get ("Uri");
753 if (uri == null)
754 throw new Exception ("Got document from Lucene w/o a URI!");
755 return UriFu.EscapedStringToUri (uri);
758 static protected Hit DocumentToHit (Document doc)
760 Hit hit;
761 hit = new Hit ();
763 hit.Uri = GetUriFromDocument (doc);
765 string str;
766 str = doc.Get ("ParentUri");
767 if (str != null)
768 hit.ParentUri = UriFu.EscapedStringToUri (str);
770 hit.Timestamp = StringFu.StringToDateTime (doc.Get ("Timestamp"));
772 AddPropertiesToHit (hit, doc, true);
774 // Get the Type and MimeType from the properties.
775 hit.Type = hit.GetFirstProperty ("beagle:HitType");
776 hit.MimeType = hit.GetFirstProperty ("beagle:MimeType");
777 hit.Source = hit.GetFirstProperty ("beagle:Source");
779 return hit;
782 static protected void AddPropertiesToHit (Hit hit, Document doc, bool from_primary_index)
784 foreach (Field f in doc.Fields ()) {
785 Property prop;
786 prop = GetPropertyFromDocument (f, doc, from_primary_index);
787 if (prop != null)
788 hit.AddProperty (prop);
793 //////////////////////////////////////////////////////////////////////////////
796 // Handle the index's item count
799 public int GetItemCount ()
801 if (last_item_count < 0) {
802 IndexReader reader;
803 reader = GetReader (PrimaryStore);
804 last_item_count = reader.NumDocs ();
805 ReleaseReader (reader);
807 return last_item_count;
810 // We should set the cached count of index items when IndexReaders
811 // are open and available, so calls to GetItemCount will return immediately.
813 protected bool HaveItemCount { get { return last_item_count >= 0; } }
815 protected void SetItemCount (IndexReader reader)
817 last_item_count = reader.NumDocs ();
820 public void SetItemCount (int count)
822 last_item_count = count;
825 protected void AdjustItemCount (int delta)
827 if (last_item_count >= 0)
828 last_item_count += delta;
831 //////////////////////////////////////////////////////////////////////////////
834 // Access to the stemmer and list of stop words
837 static PorterStemmer stemmer = new PorterStemmer ();
839 static public string Stem (string str)
841 return stemmer.Stem (str);
844 public static bool IsStopWord (string stemmed_word)
846 return ArrayFu.IndexOfString (StopAnalyzer.ENGLISH_STOP_WORDS, stemmed_word) != -1;
849 //////////////////////////////////////////////////////////////////////////////
852 // Special Hit Filtering classes
855 static private bool TrueHitFilter (Hit hit)
857 return true;
860 static private HitFilter true_hit_filter = new HitFilter (TrueHitFilter);
862 public class OrHitFilter {
864 private ArrayList all = new ArrayList ();
865 private bool contains_known_true = false;
867 public void Add (HitFilter hit_filter)
869 if (hit_filter == true_hit_filter)
870 contains_known_true = true;
871 all.Add (hit_filter);
874 public bool HitFilter (Hit hit)
876 if (contains_known_true)
877 return true;
878 foreach (HitFilter hit_filter in all)
879 if (hit_filter (hit))
880 return true;
881 return false;
885 public class AndHitFilter {
887 private ArrayList all = new ArrayList ();
889 public void Add (HitFilter hit_filter)
891 all.Add (hit_filter);
894 public bool HitFilter (Hit hit)
896 foreach (HitFilter hit_filter in all)
897 if (! hit_filter (hit))
898 return false;
899 return true;
903 public class NotHitFilter {
904 HitFilter original;
906 public NotHitFilter (HitFilter original)
908 this.original = original;
911 public bool HitFilter (Hit hit)
913 return ! original (hit);
917 //////////////////////////////////////////////////////////////////////////////
920 // Queries
923 static private LNS.Query StringToQuery (string field_name,
924 string text,
925 ArrayList term_list)
927 ArrayList tokens = new ArrayList ();
929 // Use the analyzer to extract the query's tokens.
930 // This code is taken from Lucene's query parser.
931 TokenStream source = QueryAnalyzer.TokenStream (field_name, new StringReader (text));
932 while (true) {
933 Lucene.Net.Analysis.Token token;
934 try {
935 token = source.Next ();
936 if (token == null)
937 break;
938 } catch (IOException) {
939 break;
941 if (token != null)
942 tokens.Add (token.TermText ());
944 try {
945 source.Close ();
946 } catch (IOException) {
947 // ignore
950 if (tokens.Count == 0)
951 return null;
953 LNS.PhraseQuery query = new LNS.PhraseQuery ();
955 foreach (string token in tokens) {
956 Term term;
957 term = new Term (field_name, token);
958 query.Add (term);
959 if (term_list != null)
960 term_list.Add (term);
963 return query;
967 // Date Range Handling
970 // This function will break down dates to discrete chunks of
971 // time to avoid expanding RangeQuerys as much as possible.
972 // For example, searching for
974 // YMD(5 May 2005, 16 Oct 2006)
976 // would break down into three queries:
978 // (YM(May 2005) AND D(5,31)) OR
979 // YM(Jun 2005, Sep 2006) OR
980 // (YM(Oct 2006) AND D(1,16))
982 static private DateTime lower_bound = new DateTime (1970, 1, 1);
984 // FIXME: we should probably boost this sometime around 2030.
985 // Mark your calendar.
986 static private DateTime upper_bound = new DateTime (2038, 12, 31);
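// e.g. NewYearMonthTerm (field, 2005, 5) yields the term "YM:<field>" = "200505".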
988 static private Term NewYearMonthTerm (string field_name, int y, int m)
990 return new Term ("YM:" + field_name, String.Format ("{0}{1:00}", y, m));
993 static private LNS.Query NewYearMonthQuery (string field_name, int y, int m)
995 return new LNS.TermQuery (NewYearMonthTerm (field_name, y, m));
998 static private LNS.Query NewYearMonthQuery (string field_name, int y1, int m1, int y2, int m2)
1000 return new LNS.RangeQuery (NewYearMonthTerm (field_name, y1, m1),
1001 NewYearMonthTerm (field_name, y2, m2),
1002 true); // query is inclusive
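// e.g. NewDayTerm (field, 5) yields the term "D:<field>" = "05".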
1005 static private Term NewDayTerm (string field_name, int d)
1007 return new Term ("D:" + field_name, String.Format ("{0:00}", d));
1010 static private LNS.Query NewDayQuery (string field_name, int d1, int d2)
1012 return new LNS.RangeQuery (NewDayTerm (field_name, d1),
1013 NewDayTerm (field_name, d2),
1014 true); // query is inclusive
1017 private class DateRangeHitFilter {
1018 public string Key;
1019 public DateTime StartDate;
1020 public DateTime EndDate;
1022 public bool HitFilter (Hit hit)
1024 // First, check the Timestamp
1025 if (Key == QueryPart_DateRange.AllPropertiesKey
1026 || Key == QueryPart_DateRange.TimestampKey) {
1027 DateTime dt;
1028 dt = hit.Timestamp;
1029 if (StartDate <= dt && dt <= EndDate)
1030 return true;
1031 if (Key == QueryPart_DateRange.TimestampKey)
1032 return false;
1035 if (Key == QueryPart_DateRange.AllPropertiesKey) {
1036 // Walk through all of the properties, and see if any
1037 // date properties fall inside the range.
1038 foreach (Property prop in hit.Properties) {
1039 if (prop.Type == PropertyType.Date) {
1040 DateTime dt;
1041 dt = StringFu.StringToDateTime (prop.Value);
1042 if (StartDate <= dt && dt <= EndDate)
1043 return true;
1046 return false;
1047 } else {
1048 // Walk through all of the properties with the given key,
1049 // and see if any of them fall inside of the range.
1050 string[] values;
1051 values = hit.GetProperties (Key);
1052 foreach (string v in values) {
1053 DateTime dt;
1054 dt = StringFu.StringToDateTime (v);
1055 if (StartDate <= dt && dt <= EndDate)
1056 return true;
1058 return false;
1063 static private LNS.Query GetDateRangeQuery (QueryPart_DateRange part, out HitFilter hit_filter)
1065 string field_name;
1066 if (part.Key == QueryPart_DateRange.AllPropertiesKey)
1067 field_name = TypeToWildcardField (PropertyType.Date);
1068 else if (part.Key == QueryPart_DateRange.TimestampKey)
1069 field_name = "Timestamp";
1070 else
1071 field_name = PropertyToFieldName (PropertyType.Date, part.Key);
1073 // FIXME: We could optimize this and reduce the size of our range
1074 // queries if we actually knew the min and max date that appear in
1075 // any properties in the index. We would need to inspect the index to
1076 // determine that at start-up, and then track it as new documents
1077 // get added to the index.
1078 if (part.StartDate < lower_bound)
1079 part.StartDate = lower_bound;
1080 if (part.EndDate > upper_bound || part.EndDate == DateTime.MinValue)
1081 part.EndDate = upper_bound;
1083 // Swap the start and end dates if they come in reversed.
1084 if (part.StartDate > part.EndDate) {
1085 DateTime swap;
1086 swap = part.StartDate;
1087 part.StartDate = part.EndDate;
1088 part.EndDate = swap;
1091 // Set up our hit filter to cull out the bad dates.
1092 DateRangeHitFilter drhf;
1093 drhf = new DateRangeHitFilter ();
1094 drhf.Key = part.Key;
1095 drhf.StartDate = part.StartDate;
1096 drhf.EndDate = part.EndDate;
1097 hit_filter = new HitFilter (drhf.HitFilter);
1099 Logger.Log.Debug ("Building new date range query");
1100 Logger.Log.Debug ("Start: {0}", part.StartDate);
1101 Logger.Log.Debug ("End: {0}", part.EndDate);
1103 int y1, m1, d1, y2, m2, d2;
1104 y1 = part.StartDate.Year;
1105 m1 = part.StartDate.Month;
1106 d1 = part.StartDate.Day;
1107 y2 = part.EndDate.Year;
1108 m2 = part.EndDate.Month;
1109 d2 = part.EndDate.Day;
1111 LNS.BooleanQuery top_level_query;
1112 top_level_query = new LNS.BooleanQuery ();
1114 // A special case: both the start and the end of our range fall
1115 // in the same month.
1116 if (y1 == y2 && m1 == m2) {
1117 LNS.Query ym_query;
1118 ym_query = NewYearMonthQuery (field_name, y1, m1);
1120 // If our range only covers a part of the month, do a range query on the days.
1121 if (d1 != 1 || d2 != DateTime.DaysInMonth (y2, m2)) {
1122 LNS.BooleanQuery sub_query;
1123 sub_query = new LNS.BooleanQuery ();
1124 sub_query.Add (ym_query, true, false);
1125 sub_query.Add (NewDayQuery (field_name, d1, d2), true, false);
1126 top_level_query.Add (sub_query, false, false);
1127 } else {
1128 top_level_query.Add (ym_query, false, false);
1131 } else {
1133 // Handle a partial month at the beginning of our range.
1134 if (d1 > 1) {
1135 LNS.BooleanQuery sub_query;
1136 sub_query = new LNS.BooleanQuery ();
1137 sub_query.Add (NewYearMonthQuery (field_name, y1, m1), true, false);
1138 sub_query.Add (NewDayQuery (field_name, d1, DateTime.DaysInMonth (y1, m1)), true, false);
1139 top_level_query.Add (sub_query, false, false);
1141 ++m1;
1142 if (m1 == 13) {
1143 m1 = 1;
1144 ++y1;
1148 // And likewise, handle a partial month at the end of our range.
1149 if (d2 < DateTime.DaysInMonth (y2, m2)) {
1150 LNS.BooleanQuery sub_query;
1151 sub_query = new LNS.BooleanQuery ();
1152 sub_query.Add (NewYearMonthQuery (field_name, y2, m2), true, false);
1153 sub_query.Add (NewDayQuery (field_name, 1, d2), true, false);
1154 top_level_query.Add (sub_query, false, false);
1156 --m2;
1157 if (m2 == 0) {
1158 m2 = 12;
1159 --y2;
1163 // Generate the query for the "middle" of our period, if it is non-empty
1164 if (y1 < y2 || ((y1 == y2) && m1 <= m2))
1165 top_level_query.Add (NewYearMonthQuery (field_name, y1, m1, y2, m2),
1166 false, false);
1169 return top_level_query;
1172 // search_subset_uris is a list of Uris that this search should be
1173 // limited to.
1174 static protected void QueryPartToQuery (QueryPart abstract_part,
1175 bool only_build_primary_query,
1176 ArrayList term_list,
1177 out LNS.Query primary_query,
1178 out LNS.Query secondary_query,
1179 out HitFilter hit_filter)
1181 primary_query = null;
1182 secondary_query = null;
1184 // By default, we assume that our lucene queries will return exactly the
1185 // matching set of objects. We need to set the hit filter if further
1186 // refinement of the search results is required. (As in the case of
1187 // date range queries, for example.) We essentially have to do this
1188 // to make OR queries work correctly.
1189 hit_filter = true_hit_filter;
1191 // The exception is when dealing with a prohibited part. Just return
1192 // null for the hit filter in that case. This works since
1193 // prohibited parts are not allowed inside of OR queries.
1194 if (abstract_part.Logic == QueryPartLogic.Prohibited)
1195 hit_filter = null;
1197 if (abstract_part == null)
1198 return;
1200 if (abstract_part is QueryPart_Text) {
1201 QueryPart_Text part = (QueryPart_Text) abstract_part;
1203 if (! (part.SearchFullText || part.SearchTextProperties))
1204 return;
1206 LNS.BooleanQuery p_query = new LNS.BooleanQuery ();
1207 LNS.BooleanQuery s_query = new LNS.BooleanQuery ();
1209 if (part.SearchFullText) {
1210 LNS.Query subquery;
1211 subquery = StringToQuery ("Text", part.Text, term_list);
1212 if (subquery != null)
1213 p_query.Add (subquery, false, false);
1215 // FIXME: HotText is ignored for now!
1216 // subquery = StringToQuery ("HotText", part.Text);
1217 // if (subquery != null)
1218 // p_query.Add (subquery, false, false);
1221 if (part.SearchTextProperties) {
1222 LNS.Query subquery;
1223 subquery = StringToQuery ("PropertyText", part.Text, term_list);
1224 if (subquery != null) {
1225 p_query.Add (subquery, false, false);
1226 // Properties can live in either index
1227 if (! only_build_primary_query)
1228 s_query.Add (subquery.Clone () as LNS.Query, false, false);
1231 Term term;
1232 term = new Term ("PropertyKeyword", part.Text.ToLower ()); // make sure text is lowercased
1233 // FIXME: terms have already been added to term_list, but they may have
1234 // been tokenized. The term here is the non-tokenized version; should it
1235 // be added to term_list too? (term_list is used to calculate scores.)
1236 if (term_list != null)
1237 term_list.Add (term);
1238 subquery = new LNS.TermQuery (term);
1239 p_query.Add (subquery, false, false);
1240 // Properties can live in either index
1241 if (! only_build_primary_query)
1242 s_query.Add (subquery.Clone () as LNS.Query, false, false);
1245 primary_query = p_query;
1246 if (! only_build_primary_query)
1247 secondary_query = s_query;
1249 return;
1252 if (abstract_part is QueryPart_Wildcard) {
1253 QueryPart_Wildcard part = (QueryPart_Wildcard) abstract_part;
1255 LNS.BooleanQuery p_query = new LNS.BooleanQuery ();
1256 LNS.BooleanQuery s_query = new LNS.BooleanQuery ();
1258 Term term;
1259 LNS.Query subquery;
1261 // Lower case the terms for searching
1262 string query_string_lower = part.QueryString.ToLower ();
1264 // Search text content
1265 term = new Term ("Text", query_string_lower);
1266 subquery = new LNS.WildcardQuery (term);
1267 p_query.Add (subquery, false, false);
1268 term_list.Add (term);
1270 // Search text properties
1271 term = new Term ("PropertyText", query_string_lower);
1272 subquery = new LNS.WildcardQuery (term);
1273 p_query.Add (subquery, false, false);
1274 // Properties can live in either index
1275 if (! only_build_primary_query)
1276 s_query.Add (subquery.Clone () as LNS.Query, false, false);
1277 term_list.Add (term);
1279 // Search property keywords
1280 term = new Term ("PropertyKeyword", query_string_lower);
1281 term_list.Add (term);
1282 subquery = new LNS.WildcardQuery (term);
1283 p_query.Add (subquery, false, false);
1284 // Properties can live in either index
1285 if (! only_build_primary_query)
1286 s_query.Add (subquery.Clone () as LNS.Query, false, false);
1288 primary_query = p_query;
1289 if (! only_build_primary_query)
1290 secondary_query = s_query;
1292 return;
1295 if (abstract_part is QueryPart_Property) {
1296 QueryPart_Property part = (QueryPart_Property) abstract_part;
1298 string field_name;
1299 if (part.Key == QueryPart_Property.AllProperties) {
1300 field_name = TypeToWildcardField (part.Type);
1301 // FIXME: probably shouldn't just return silently
1302 if (field_name == null)
1303 return;
1304 } else
1305 field_name = PropertyToFieldName (part.Type, part.Key);
1307 if (part.Type == PropertyType.Text)
1308 primary_query = StringToQuery (field_name, part.Value, term_list);
1309 else {
1310 Term term;
1311 term = new Term (field_name, part.Value.ToLower ());
1312 if (term_list != null)
1313 term_list.Add (term);
1314 primary_query = new LNS.TermQuery (term);
1317 // Properties can live in either index
1318 if (! only_build_primary_query && primary_query != null)
1319 secondary_query = primary_query.Clone () as LNS.Query;
1321 return;
1324 if (abstract_part is QueryPart_DateRange) {
1326 QueryPart_DateRange part = (QueryPart_DateRange) abstract_part;
1328 primary_query = GetDateRangeQuery (part, out hit_filter);
1329 // Date properties can live in either index
1330 if (! only_build_primary_query && primary_query != null)
1331 secondary_query = primary_query.Clone () as LNS.Query;
1333 // If this is a prohibited part, invert our hit filter.
1334 if (part.Logic == QueryPartLogic.Prohibited) {
1335 NotHitFilter nhf;
1336 nhf = new NotHitFilter (hit_filter);
1337 hit_filter = new HitFilter (nhf.HitFilter);
1340 return;
1343 if (abstract_part is QueryPart_Or) {
1344 QueryPart_Or part = (QueryPart_Or) abstract_part;
1346 // Assemble a new BooleanQuery combining all of the sub-parts.
1347 LNS.BooleanQuery p_query;
1348 p_query = new LNS.BooleanQuery ();
1350 LNS.BooleanQuery s_query = null;
1351 if (! only_build_primary_query)
1352 s_query = new LNS.BooleanQuery ();
1354 primary_query = p_query;
1355 secondary_query = s_query;
1357 OrHitFilter or_hit_filter = null;
1359 foreach (QueryPart sub_part in part.SubParts) {
1360 LNS.Query p_subq, s_subq;
1361 HitFilter sub_hit_filter; // FIXME: This is (and must be) ignored
1362 // FIXME: Any subpart in an OR which has a hit filter won't work
1363 // correctly, because we can't tell which part of an OR we matched
1364 // against to filter correctly. This affects date range queries.
1365 QueryPartToQuery (sub_part, only_build_primary_query,
1366 term_list,
1367 out p_subq, out s_subq, out sub_hit_filter);
1368 if (p_subq != null)
1369 p_query.Add (p_subq, false, false);
1370 if (s_subq != null)
1371 s_query.Add (s_subq, false, false);
1372 if (sub_hit_filter != null) {
1373 if (or_hit_filter == null)
1374 or_hit_filter = new OrHitFilter ();
1375 or_hit_filter.Add (sub_hit_filter);
1379 if (or_hit_filter != null)
1380 hit_filter = new HitFilter (or_hit_filter.HitFilter);
1382 return;
1385 throw new Exception ("Unhandled QueryPart type! " + abstract_part.ToString ());
1388 static protected LNS.Query UriQuery (string field_name, Uri uri)
1390 return new LNS.TermQuery (new Term (field_name, UriFu.UriToEscapedString (uri)));
1393 static protected LNS.Query UriQuery (string field_name, ICollection uri_list)
1395 return UriQuery (field_name, uri_list, null);
1398 static protected LNS.Query UriQuery (string field_name, ICollection uri_list, LNS.Query extra_requirement)
1400 if (uri_list.Count == 0)
1401 return null;
1403 int max_clauses;
1404 max_clauses = LNS.BooleanQuery.GetMaxClauseCount ();
1406 int N;
1407 N = 1 + (uri_list.Count - 1) / max_clauses;
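// N is the number of sub-queries needed to stay under Lucene's clause
// limit; e.g. 2500 uris with the default limit of 1024 clauses gives N = 3.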
1409 LNS.BooleanQuery top_query;
1410 top_query = new LNS.BooleanQuery ();
1412 int cursor = 0;
1413 if (extra_requirement != null) {
1414 top_query.Add (extra_requirement, true, false);
1415 ++cursor;
1418 ArrayList bottom_queries = null;
1420 if (N > 1) {
1421 bottom_queries = new ArrayList ();
1422 for (int i = 0; i < N; ++i) {
1423 LNS.BooleanQuery bq;
1424 bq = new LNS.BooleanQuery ();
1425 bottom_queries.Add (bq);
1426 top_query.Add (bq, false, false);
1430 foreach (Uri uri in uri_list) {
1431 LNS.Query subquery;
1432 subquery = UriQuery (field_name, uri);
1434 LNS.BooleanQuery target;
1435 if (N == 1)
1436 target = top_query;
1437 else {
1438 target = (LNS.BooleanQuery) bottom_queries [cursor];
1439 ++cursor;
1440 if (cursor >= N)
1441 cursor = 0;
1444 target.Add (subquery, false, false);
1447 return top_query;
1450 ///////////////////////////////////////////////////////////////////////////////////
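// A rough measure of index fragmentation: count the compound-format
// (.cfs) segment files in each index and report the larger of the two.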
1452 public int SegmentCount {
1453 get {
1454 DirectoryInfo dir_info;
1455 int p_count = 0, s_count = 0;
1457 dir_info = new DirectoryInfo (PrimaryIndexDirectory);
1458 foreach (FileInfo file_info in dir_info.GetFiles ())
1459 if (file_info.Extension == ".cfs")
1460 ++p_count;
1462 dir_info = new DirectoryInfo (SecondaryIndexDirectory);
1463 foreach (FileInfo file_info in dir_info.GetFiles ())
1464 if (file_info.Extension == ".cfs")
1465 ++s_count;
1467 return p_count > s_count ? p_count : s_count;
1471 ///////////////////////////////////////////////////////////////////////////////////
1473 // Cache IndexReaders on a per-Lucene index basis, since they
1474 // are extremely expensive to create. Note that using this
1475 // only makes sense in situations where the index might
1476 // change from underneath us, but most of the time probably
1477 // won't. This means it makes sense to do
1478 // this in LuceneQueryingDriver.cs, but it doesn't in
1479 // LuceneIndexingDriver.cs.
1481 private class ReaderAndVersion {
1483 public IndexReader Reader;
1484 public long Version;
1485 public int Refcount;
1487 public ReaderAndVersion (IndexReader reader, long version)
1489 this.Reader = reader;
1490 this.Version = version;
1491 this.Refcount = 1;
1495 static private Hashtable directory_rav_map = new Hashtable ();
1496 static private Hashtable reader_rav_map = new Hashtable ();
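// Refcounting convention: the cache itself holds one reference (taken in
// the ReaderAndVersion constructor) and every outstanding GetReader or
// GetSearcher caller holds one more. ReleaseReader drops a caller's
// reference; the underlying IndexReader is closed once both the cache
// and all callers have let go.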
1498 static public LNS.IndexSearcher GetSearcher (Lucene.Net.Store.Directory directory)
1500 IndexReader reader = GetReader (directory);
1502 return new LNS.IndexSearcher (reader);
1505 static public IndexReader GetReader (Lucene.Net.Store.Directory directory)
1507 IndexReader reader;
1508 long version;
1510 lock (reader_rav_map) {
1511 ReaderAndVersion rav = (ReaderAndVersion) directory_rav_map [directory];
1513 if (rav == null) {
1514 version = IndexReader.GetCurrentVersion (directory);
1515 reader = IndexReader.Open (directory);
1517 rav = new ReaderAndVersion (reader, version);
1518 rav.Refcount++;
1520 directory_rav_map [directory] = rav;
1521 reader_rav_map [reader] = rav;
1523 return reader;
1526 version = IndexReader.GetCurrentVersion (directory);
1528 if (version != rav.Version) {
1529 UnrefReaderAndVersion_Unlocked (rav);
1531 reader = IndexReader.Open (directory);
1533 rav = new ReaderAndVersion (reader, version);
1534 rav.Refcount++;
1536 directory_rav_map [directory] = rav;
1537 reader_rav_map [reader] = rav;
1538 } else
1539 rav.Refcount++;
1541 return rav.Reader;
1545 static private void UnrefReaderAndVersion_Unlocked (ReaderAndVersion rav)
1547 rav.Refcount--;
1549 if (rav.Refcount == 0) {
1550 rav.Reader.Close ();
1551 reader_rav_map.Remove (rav.Reader);
1555 static public void ReleaseReader (IndexReader reader)
1557 lock (reader_rav_map) {
1558 ReaderAndVersion rav = (ReaderAndVersion) reader_rav_map [reader];
1560 UnrefReaderAndVersion_Unlocked (rav);
1564 static public void ReleaseSearcher (LNS.IndexSearcher searcher)
1566 IndexReader reader = searcher.GetIndexReader ();
1568 searcher.Close ();
1569 ReleaseReader (reader);
1572 ///////////////////////////////////////////////////////////////////////////////////
1575 // Various ways to grab lots of hits at once.
1576 // These should never be used for querying, only for utility
1577 // functions.
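// The cookie passed to GetBlockOfHits is the Lucene document number to
// resume from; pass a negative cookie to start at a random position.
// The returned value is the cookie to pass on the next call.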
1580 public int GetBlockOfHits (int cookie,
1581 Hit [] block_of_hits)
1583 IndexReader primary_reader;
1584 IndexReader secondary_reader;
1585 primary_reader = GetReader (PrimaryStore);
1586 secondary_reader = GetReader (SecondaryStore);
1588 int request_size;
1589 request_size = block_of_hits.Length;
1590 if (request_size > primary_reader.NumDocs ())
1591 request_size = primary_reader.NumDocs ();
1593 int max_doc;
1594 max_doc = primary_reader.MaxDoc ();
1596 if (cookie < 0) {
1597 Random random;
1598 random = new Random ();
1599 cookie = random.Next (max_doc);
1602 int original_cookie;
1603 original_cookie = cookie;
1605 Hashtable primary_docs, secondary_docs;
1606 primary_docs = UriFu.NewHashtable ();
1607 secondary_docs = UriFu.NewHashtable ();
1609 // Load the primary documents
1610 for (int i = 0; i < request_size; ++i) {
1612 if (! primary_reader.IsDeleted (cookie)) {
1613 Document doc;
1614 doc = primary_reader.Document (cookie);
1615 primary_docs [GetUriFromDocument (doc)] = doc;
1618 ++cookie;
1619 if (cookie >= max_doc) // wrap around
1620 cookie = 0;
1622 // If we somehow end up back where we started,
1623 // give up.
1624 if (cookie == original_cookie)
1625 break;
1628 // If necessary, load the secondary documents
1629 if (secondary_reader != null) {
1630 LNS.IndexSearcher searcher;
1631 searcher = new LNS.IndexSearcher (secondary_reader);
1633 LNS.Query uri_query;
1634 uri_query = UriQuery ("Uri", primary_docs.Keys);
1636 LNS.Hits hits;
1637 hits = searcher.Search (uri_query);
1638 for (int i = 0; i < hits.Length (); ++i) {
1639 Document doc;
1640 doc = hits.Doc (i);
1641 secondary_docs [GetUriFromDocument (doc)] = doc;
1644 searcher.Close ();
1647 ReleaseReader (primary_reader);
1648 ReleaseReader (secondary_reader);
1650 // Now assemble the hits
1651 int j = 0;
1652 foreach (Uri uri in primary_docs.Keys) {
1653 Document primary_doc, secondary_doc;
1654 primary_doc = primary_docs [uri] as Document;
1655 secondary_doc = secondary_docs [uri] as Document;
1657 Hit hit;
1658 hit = DocumentToHit (primary_doc);
1659 if (secondary_doc != null)
1660 AddPropertiesToHit (hit, secondary_doc, false);
1662 block_of_hits [j] = hit;
1663 ++j;
1666 // null-pad the array, if necessary
1667 for (; j < block_of_hits.Length; ++j)
1668 block_of_hits [j] = null;
1671 // Return the new cookie
1672 return cookie;
1675 // For a large index, this will be very slow and will consume
1676 // a lot of memory. Don't call it without a good reason!
1677 // We return a hashtable indexed by Uri.
1678 public Hashtable GetAllHitsByUri ()
1680 Hashtable all_hits;
1681 all_hits = UriFu.NewHashtable ();
1683 IndexReader primary_reader;
1684 IndexReader secondary_reader;
1685 primary_reader = GetReader (PrimaryStore);
1686 secondary_reader = GetReader (SecondaryStore);
1688 // Load everything from the primary index
1689 int max_doc;
1690 max_doc = primary_reader.MaxDoc ();
1691 for (int i = 0; i < max_doc; ++i) {
1693 if (primary_reader.IsDeleted (i))
1694 continue;
1696 Document doc;
1697 doc = primary_reader.Document (i);
1699 Hit hit;
1700 hit = DocumentToHit (doc);
1701 all_hits [hit.Uri] = hit;
1704 // Now add in everything from the secondary index, if it exists
1705 if (secondary_reader != null) {
1706 max_doc = secondary_reader.MaxDoc ();
1707 for (int i = 0; i < max_doc; ++i) {
1709 if (secondary_reader.IsDeleted (i))
1710 continue;
1712 Document doc;
1713 doc = secondary_reader.Document (i);
1715 Uri uri;
1716 uri = GetUriFromDocument (doc);
1718 Hit hit;
1719 hit = (Hit) all_hits [uri];
1720 if (hit != null)
1721 AddPropertiesToHit (hit, doc, false);
1725 ReleaseReader (primary_reader);
1726 ReleaseReader (secondary_reader);
1728 return all_hits;