// [beagle.git] Lucene.Net/QueryParser/QueryParser.cs (initial revision)
using System;
using System.IO;
using System.Globalization;
using System.Collections;
using Lucene.Net.Index;
using Lucene.Net.Analysis;
using Lucene.Net.Documents;
using Lucene.Net.Search;

namespace Lucene.Net.QueryParsers
{
	/// <summary>
	/// This class is generated by JavaCC. The only method that clients should need
	/// to call is <a href="#parse">Parse()</a>.
	///
	/// The syntax for query strings is as follows:
	/// A Query is a series of clauses.
	/// A clause may be prefixed by:
	/// <ul>
	/// <li> a plus (<code>+</code>) or a minus (<code>-</code>) sign, indicating
	/// that the clause is required or prohibited respectively; or</li>
	/// <li> a term followed by a colon, indicating the field to be searched.
	/// This enables one to construct queries which search multiple fields.</li>
	/// </ul>
	///
	/// A clause may be either:
	/// <ul>
	/// <li> a term, indicating all the documents that contain this term; or</li>
	/// <li> a nested query, enclosed in parentheses. Note that this may be used
	/// with a <code>+</code>/<code>-</code> prefix to require any of a set of
	/// terms.</li>
	/// </ul>
	///
	/// Thus, in BNF, the query grammar is:
	/// <pre>
	/// Query ::= ( Clause )*
	/// Clause ::= ["+", "-"] [&lt;TERM&gt; ":"] ( &lt;TERM&gt; | "(" Query ")" )
	/// </pre>
	///
	/// <p>
	/// Examples of appropriately formatted queries can be found in the <a
	/// href="http://jakarta.apache.org/lucene/src/test/org/apache/lucene/queryParser/TestQueryParser.java">test cases</a>.
	/// </p>
	/// </summary>
	/// <author>Brian Goetz</author>
	/// <author>Peter Halacsy</author>
	/// <author>Tatu Saloranta</author>
	public class QueryParser : QueryParserConstants
	{
		private const int CONJ_NONE = 0;
		private const int CONJ_AND = 1;
		private const int CONJ_OR = 2;

		private const int MOD_NONE = 0;
		private const int MOD_NOT = 10;
		private const int MOD_REQ = 11;

		public const int DEFAULT_OPERATOR_OR = 0;
		public const int DEFAULT_OPERATOR_AND = 1;

		/// <summary>
		/// The actual operator that the parser uses to combine query terms.
		/// </summary>
		private int _operator = DEFAULT_OPERATOR_OR;

		/// <summary>
		/// Whether terms of wildcard and prefix queries are to be automatically
		/// lower-cased or not. Default is <code>true</code>.
		/// </summary>
		bool lowercaseWildcardTerms = true;

		Analyzer analyzer;
		String field;
		int phraseSlop = 0;
		IFormatProvider locale = CultureInfo.CurrentCulture;

		/// <summary>
		/// Parses a query string, returning a Lucene.Net.Search.Query.
		/// </summary>
		/// <param name="query">the query string to be parsed.</param>
		/// <param name="field">the default field for query terms.</param>
		/// <param name="analyzer">used to find terms in the query text.</param>
		/// <returns>the resulting Query</returns>
		/// <throws>ParseException if the parsing fails</throws>
		static public Query Parse(String query, String field, Analyzer analyzer)
		{
			try
			{
				QueryParser parser = new QueryParser(field, analyzer);
				return parser.Parse(query);
			}
			catch (TokenMgrError tme)
			{
				throw new ParseException(tme.Message);
			}
		}
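
		// Illustrative usage sketch (editor's addition, not part of the generated
		// file). The static Parse() overload above is the simplest entry point;
		// the field name and analyzer below are arbitrary examples.
		//
		//     Query q = QueryParser.Parse("+title:lucene -deprecated search",
		//                                 "contents",
		//                                 new Lucene.Net.Analysis.SimpleAnalyzer());
		//     // "+" marks a required clause, "-" a prohibited one, and
		//     // "title:" overrides the default "contents" field for that term.
		//     Console.WriteLine(q.ToString("contents"));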

		/// <summary>
		/// Constructs a query parser.
		/// </summary>
		/// <param name="f">the default field for query terms.</param>
		/// <param name="a">used to find terms in the query text.</param>
		public QueryParser(String f, Analyzer a) :
			this(new FastCharStream(new StringReader("")))
		{
			analyzer = a;
			field = f;
		}

		/// <summary>
		/// Parses a query string, returning a
		/// <a href="Lucene.Net.Search.Query.Html">Query</a>.
		/// </summary>
		/// <param name="query">the query string to be parsed.</param>
		/// <returns>the resulting Query</returns>
		/// <throws>ParseException if the parsing fails</throws>
		/// <throws>TokenMgrError if the parsing fails</throws>
		public Query Parse(String query)
		{
			ReInit(new FastCharStream(new StringReader(query)));
			return Query(field);
		}

		/// <summary>
		/// Sets the default slop for phrases. If zero, then exact phrase matches
		/// are required. Default value is zero.
		/// </summary>
		/// <param name="phraseSlop"></param>
		public void SetPhraseSlop(int phraseSlop)
		{
			this.phraseSlop = phraseSlop;
		}

		/// <summary>
		/// Gets the default slop for phrases.
		/// </summary>
		/// <returns></returns>
		public int GetPhraseSlop()
		{
			return phraseSlop;
		}
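
		// Illustrative sketch (editor's addition): the default phrase slop applies
		// to every quoted phrase parsed afterwards, while a "~N" suffix on a phrase
		// sets the slop for that phrase alone (see the QUOTED branch of Term()).
		//
		//     parser.SetPhraseSlop(2);
		//     parser.Parse("\"apache lucene\"");    // PhraseQuery with slop 2
		//     parser.Parse("\"apache lucene\"~10"); // the ~10 suffix overrides the default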

		/// <summary>
		/// Sets the default boolean operator of the QueryParser.
		/// In classic mode (<code>DEFAULT_OPERATOR_OR</code>) terms without any modifiers
		/// are considered optional: for example <code>capital of Hungary</code> is equivalent to
		/// <code>capital OR of OR Hungary</code>.<br/>
		/// In <code>DEFAULT_OPERATOR_AND</code> mode terms are considered to be in conjunction: the
		/// above-mentioned query is parsed as <code>capital AND of AND Hungary</code>.
		/// </summary>
		/// <param name="_operator"></param>
		public void SetOperator(int _operator)
		{
			this._operator = _operator;
		}

		/// <summary>
		/// Gets the implicit operator setting, which will be either DEFAULT_OPERATOR_AND
		/// or DEFAULT_OPERATOR_OR.
		/// </summary>
		public int GetOperator()
		{
			return _operator;
		}
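
		// Illustrative sketch (editor's addition): how the default operator changes
		// the way bare terms are combined ("analyzer" stands for any
		// Lucene.Net.Analysis.Analyzer).
		//
		//     QueryParser parser = new QueryParser("contents", analyzer);
		//     parser.Parse("capital of Hungary");          // capital OR of OR Hungary
		//     parser.SetOperator(QueryParser.DEFAULT_OPERATOR_AND);
		//     parser.Parse("capital of Hungary");          // capital AND of AND Hungary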

		public void SetLowercaseWildcardTerms(bool lowercaseWildcardTerms)
		{
			this.lowercaseWildcardTerms = lowercaseWildcardTerms;
		}

		public bool GetLowercaseWildcardTerms()
		{
			return lowercaseWildcardTerms;
		}

		/// <summary>
		/// Get and set locale used by date range parsing.
		/// </summary>
		public IFormatProvider Locale
		{
			get
			{
				return locale;
			}
			set
			{
				this.locale = value;
			}
		}

		protected void AddClause(ArrayList clauses, int conj, int mods, Query q)
		{
			bool required, prohibited;

			// If this term is introduced by AND, make the preceding term required,
			// unless it's already prohibited.
			if (conj == CONJ_AND)
			{
				BooleanClause c = (BooleanClause) clauses[clauses.Count-1];
				if (!c.prohibited)
					c.required = true;
			}

			if (_operator == DEFAULT_OPERATOR_AND && conj == CONJ_OR)
			{
				// If this term is introduced by OR, make the preceding term optional,
				// unless it's prohibited (that means we leave -a OR b, but +a OR b --> a OR b).
				// Notice that if the input is a OR b, the first term is parsed as required;
				// without this modification a OR b would be parsed as +a OR b.
				BooleanClause c = (BooleanClause) clauses[clauses.Count-1];
				if (!c.prohibited)
					c.required = false;
			}

			// We might have been passed a null query; the term might have been
			// filtered away by the analyzer.
			if (q == null)
				return;

			if (_operator == DEFAULT_OPERATOR_OR)
			{
				// We set REQUIRED if we're introduced by AND or +; PROHIBITED if
				// introduced by NOT or -; make sure not to set both.
				prohibited = (mods == MOD_NOT);
				required = (mods == MOD_REQ);
				if (conj == CONJ_AND && !prohibited)
				{
					required = true;
				}
			}
			else
			{
				// We set PROHIBITED if we're introduced by NOT or -; we set REQUIRED
				// if not PROHIBITED and not introduced by OR.
				prohibited = (mods == MOD_NOT);
				required = (!prohibited && conj != CONJ_OR);
			}
			clauses.Add(new BooleanClause(q, required, prohibited));
		}
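
		// Editor's note (illustrative, for the default OR operator): modifiers and
		// conjunctions map onto BooleanClause flags roughly like this:
		//
		//     "+a"      -> required
		//     "-a"      -> prohibited
		//     "a"       -> optional (neither flag set)
		//     "a AND b" -> the AND marks both the preceding and the current clause required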

		protected virtual Query GetFieldQuery(String field,
				Analyzer analyzer,
				String queryText)
		{
			// Use the analyzer to get all the tokens, and then build a TermQuery,
			// PhraseQuery, or nothing based on the term count.

			TokenStream source = analyzer.TokenStream(field,
					new StringReader(queryText));
			ArrayList v = new ArrayList();
			Lucene.Net.Analysis.Token t;

			while (true)
			{
				try
				{
					t = source.Next();
				}
				catch (IOException)
				{
					t = null;
				}
				if (t == null)
					break;
				v.Add(t.TermText());
			}
			try
			{
				source.Close();
			}
			catch (System.IO.IOException)
			{
				// ignore
			}

			if (v.Count == 0)
				return null;
			else if (v.Count == 1)
				return new TermQuery(new Term(field, (String) v[0]));
			else
			{
				PhraseQuery q = new PhraseQuery();
				q.SetSlop(phraseSlop);
				for (int i = 0; i < v.Count; i++)
				{
					q.Add(new Term(field, (String) v[i]));
				}
				return q;
			}
		}

		protected virtual Query GetRangeQuery(String field,
				Analyzer analyzer,
				String part1,
				String part2,
				bool inclusive)
		{
			try
			{
				DateTime d1 = DateTime.Parse(part1, locale);
				DateTime d2 = DateTime.Parse(part2, locale);
				part1 = DateField.DateToString(d1);
				part2 = DateField.DateToString(d2);
			}
			catch (Exception) { }

			return new RangeQuery(new Term(field, part1),
					new Term(field, part2),
					inclusive);
		}
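
		// Illustrative sketch (editor's addition): range syntax in a query string
		// ends up here. Square brackets are inclusive, curly braces exclusive, and
		// endpoints that DateTime.Parse accepts under the configured Locale are
		// normalized with DateField.DateToString before the RangeQuery is built.
		// The field names below are arbitrary examples.
		//
		//     parser.Parse("modified:[1/1/2004 TO 12/31/2004]"); // inclusive, parsed as dates
		//     parser.Parse("version:{1.0 TO 2.0}");              // exclusive term range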

		/// <summary>
		/// Factory method for generating a query, given a set of clauses.
		/// By default creates a BooleanQuery composed of the clauses passed in.
		///
		/// Can be overridden by extending classes, to modify the query being
		/// returned.
		/// </summary>
		/// <param name="clauses">
		/// ArrayList that contains BooleanClause instances to join.
		/// </param>
		/// <returns>Resulting Query object.</returns>
		protected virtual Query GetBoolQuery(ArrayList clauses)
		{
			BooleanQuery query = new BooleanQuery();
			for (int i = 0; i < clauses.Count; i++)
			{
				query.Add((BooleanClause) clauses[i]);
			}
			return query;
		}

		/// <summary>
		/// Factory method for generating a query. Called when the parser
		/// parses an input term token that contains one or more wildcard
		/// characters (? and *), but is not a prefix term token (one
		/// that has just a single * character at the end).
		/// <p>
		/// Depending on settings, the term may be lower-cased
		/// automatically. It will not go through the default Analyzer,
		/// however, since normal Analyzers are unlikely to work properly
		/// with wildcard templates.
		/// </p>
		/// <p>
		/// Can be overridden by extending classes, to provide custom handling for
		/// wildcard queries, which may be necessary due to missing analyzer calls.</p>
		/// </summary>
		/// <param name="field">Name of the field the query will use.</param>
		/// <param name="termStr">Term token that contains one or more wild card
		/// characters (? or *), but is not a simple prefix term
		/// </param>
		/// <returns>Resulting Query built for the term</returns>
		protected virtual Query GetWildcardQuery(String field, String termStr)
		{
			if (lowercaseWildcardTerms)
			{
				termStr = termStr.ToLower();
			}
			Term t = new Term(field, termStr);
			return new WildcardQuery(t);
		}

		/// <summary>
		/// Factory method for generating a query (similar to
		/// GetWildcardQuery). Called when the parser parses an input term
		/// token that uses prefix notation; that is, contains a single '*' wildcard
		/// character as its last character. Since this is a special case
		/// of a generic wildcard term, and such a query can be optimized easily,
		/// this usually results in a different query object.
		/// <p>
		/// Depending on settings, a prefix term may be lower-cased
		/// automatically. It will not go through the default Analyzer,
		/// however, since normal Analyzers are unlikely to work properly
		/// with wildcard templates.
		/// </p>
		/// <p>
		/// Can be overridden by extending classes, to provide custom handling for
		/// wild card queries, which may be necessary due to missing analyzer calls.
		/// </p>
		/// </summary>
		/// <param name="field">Name of the field the query will use.</param>
		/// <param name="termStr">Term token to use for building the term for the query
		/// (<b>without</b> the trailing '*' character!)</param>
		/// <returns>Resulting Query built for the term</returns>
		protected Query GetPrefixQuery(String field, String termStr)
		{
			if (lowercaseWildcardTerms)
			{
				termStr = termStr.ToLower();
			}
			Term t = new Term(field, termStr);
			return new PrefixQuery(t);
		}

		/// <summary>
		/// Factory method for generating a query (similar to
		/// GetWildcardQuery). Called when the parser parses
		/// an input term token that has the fuzzy suffix (~) appended.
		/// </summary>
		/// <param name="field">Name of the field the query will use.</param>
		/// <param name="termStr">Term token to use for building the term for the query</param>
		/// <returns>Resulting Query built for the term</returns>
		protected virtual Query GetFuzzyQuery(String field, String termStr)
		{
			Term t = new Term(field, termStr);
			return new FuzzyQuery(t);
		}

		public static void Main(String[] args)
		{
			QueryParser qp = new QueryParser(
				"field",
				new Lucene.Net.Analysis.SimpleAnalyzer()
			);
			Query q = qp.Parse(args[0]);
			Console.WriteLine(q.ToString("field"));
		}

		/// <summary>
		/// Query ::= ( Clause )*
		/// Clause ::= ["+", "-"] [&lt;TERM&gt; ":"] ( &lt;TERM&gt; | "(" Query ")" )
		/// </summary>
		/// <returns></returns>
		public int Conjunction()
		{
			int ret = CONJ_NONE;
			switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
			{
			case AND:
				goto case OR;
			case OR:
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case AND:
					jj_consume_token(AND);
					ret = CONJ_AND;
					break;
				case OR:
					jj_consume_token(OR);
					ret = CONJ_OR;
					break;
				default:
					jj_la1[0] = jj_gen;
					jj_consume_token(-1);
					throw new ParseException();
				}
				break;
			default:
				jj_la1[1] = jj_gen;
				break;
			}
			if (true) return ret;
		}

		public int Modifiers()
		{
			int ret = MOD_NONE;
			switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
			{
			case NOT:
				goto case MINUS;
			case PLUS:
				goto case MINUS;
			case MINUS:
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case PLUS:
					jj_consume_token(PLUS);
					ret = MOD_REQ;
					break;
				case MINUS:
					jj_consume_token(MINUS);
					ret = MOD_NOT;
					break;
				case NOT:
					jj_consume_token(NOT);
					ret = MOD_NOT;
					break;
				default:
					jj_la1[2] = jj_gen;
					jj_consume_token(-1);
					throw new ParseException();
				}
				break;
			default:
				jj_la1[3] = jj_gen;
				break;
			}
			{if (true) return ret;}
		}

		public Query Query(String field)
		{
			ArrayList clauses = new ArrayList();
			Query q, firstQuery = null;
			int conj, mods;
			mods = Modifiers();
			q = Clause(field);
			AddClause(clauses, CONJ_NONE, mods, q);
			if (mods == MOD_NONE)
				firstQuery = q;

			while (true)
			{
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case AND:
					break;
				case OR:
					break;
				case NOT:
					break;
				case PLUS:
					break;
				case MINUS:
					break;
				case LPAREN:
					break;
				case QUOTED:
					break;
				case TERM:
					break;
				case PREFIXTERM:
					break;
				case WILDTERM:
					break;
				case RANGEIN_START:
					break;
				case RANGEEX_START:
					break;
				case NUMBER:
					break;
				default:
					jj_la1[4] = jj_gen;
					goto l1;
				}
				conj = Conjunction();
				mods = Modifiers();
				q = Clause(field);
				AddClause(clauses, conj, mods, q);
			}
		l1:

			if (clauses.Count == 1 && firstQuery != null)
				{return firstQuery;}
			else
			{
				return GetBoolQuery(clauses);
			}
		}

		public Query Clause(String field)
		{
			Query q;
			Token fieldToken = null, boost = null;
			if (jj_2_1(2))
			{
				fieldToken = jj_consume_token(TERM);
				jj_consume_token(COLON);
				field = fieldToken.image;
			}
			else
			{
			}
			switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
			{
			case QUOTED:
			case TERM:
			case PREFIXTERM:
			case WILDTERM:
			case RANGEIN_START:
			case RANGEEX_START:
			case NUMBER:
				q = Term(field);
				break;
			case LPAREN:
				jj_consume_token(LPAREN);
				q = Query(field);
				jj_consume_token(RPAREN);
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case CARAT:
					jj_consume_token(CARAT);
					boost = jj_consume_token(NUMBER);
					break;
				default:
					jj_la1[5] = jj_gen;
					break;
				}
				break;
			default:
				jj_la1[6] = jj_gen;
				jj_consume_token(-1);
				throw new ParseException();
			}
			if (boost != null)
			{
				float f = (float) 1.0;
				try
				{
					f = Single.Parse(boost.image, NumberFormatInfo.InvariantInfo);
					q.SetBoost(f);
				}
				catch (Exception) { }
			}
			{if (true) return q;}
		}

		public Query Term(String field)
		{
			Token term, boost = null, slop = null, goop1, goop2;
			bool prefix = false;
			bool wildcard = false;
			bool fuzzy = false;
			Query q;
			switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
			{
			case TERM:
			case PREFIXTERM:
			case WILDTERM:
			case NUMBER:
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case TERM:
					term = jj_consume_token(TERM);
					break;
				case PREFIXTERM:
					term = jj_consume_token(PREFIXTERM);
					prefix = true;
					break;
				case WILDTERM:
					term = jj_consume_token(WILDTERM);
					wildcard = true;
					break;
				case NUMBER:
					term = jj_consume_token(NUMBER);
					break;
				default:
					jj_la1[7] = jj_gen;
					jj_consume_token(-1);
					throw new ParseException();
				}
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case FUZZY:
					jj_consume_token(FUZZY);
					fuzzy = true;
					break;
				default:
					jj_la1[8] = jj_gen;
					break;
				}
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case CARAT:
					jj_consume_token(CARAT);
					boost = jj_consume_token(NUMBER);

					switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
					{
					case FUZZY:
						jj_consume_token(FUZZY);
						fuzzy = true;
						break;
					default:
						jj_la1[9] = jj_gen;
						break;
					}
					break;
				default:
					jj_la1[10] = jj_gen;
					break;
				}
				if (wildcard)
				{
					q = GetWildcardQuery(field, term.image);
				}
				else if (prefix)
				{
					q = GetPrefixQuery(field, term.image.Substring(0, term.image.Length-1));
				}
				else if (fuzzy)
				{
					q = GetFuzzyQuery(field, term.image);
				}
				else
				{
					q = GetFieldQuery(field, analyzer, term.image);
				}
				break;
			case RANGEIN_START:
				jj_consume_token(RANGEIN_START);
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case RANGEIN_GOOP:
					goop1 = jj_consume_token(RANGEIN_GOOP);
					break;
				case RANGEIN_QUOTED:
					goop1 = jj_consume_token(RANGEIN_QUOTED);
					break;
				default:
					jj_la1[11] = jj_gen;
					jj_consume_token(-1);
					throw new ParseException();
				}
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case RANGEIN_TO:
					jj_consume_token(RANGEIN_TO);
					break;
				default:
					jj_la1[12] = jj_gen;
					break;
				}
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case RANGEIN_GOOP:
					goop2 = jj_consume_token(RANGEIN_GOOP);
					break;
				case RANGEIN_QUOTED:
					goop2 = jj_consume_token(RANGEIN_QUOTED);
					break;
				default:
					jj_la1[13] = jj_gen;
					jj_consume_token(-1);
					throw new ParseException();
				}
				jj_consume_token(RANGEIN_END);
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case CARAT:
					jj_consume_token(CARAT);
					boost = jj_consume_token(NUMBER);
					break;
				default:
					jj_la1[14] = jj_gen;
					break;
				}
				if (goop1.kind == RANGEIN_QUOTED)
					goop1.image = goop1.image.Substring(1, goop1.image.Length-2);
				if (goop2.kind == RANGEIN_QUOTED)
					goop2.image = goop2.image.Substring(1, goop2.image.Length-2);

				q = GetRangeQuery(field, analyzer, goop1.image, goop2.image, true);
				break;
			case RANGEEX_START:
				jj_consume_token(RANGEEX_START);
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case RANGEEX_GOOP:
					goop1 = jj_consume_token(RANGEEX_GOOP);
					break;
				case RANGEEX_QUOTED:
					goop1 = jj_consume_token(RANGEEX_QUOTED);
					break;
				default:
					jj_la1[15] = jj_gen;
					jj_consume_token(-1);
					throw new ParseException();
				}
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case RANGEEX_TO:
					jj_consume_token(RANGEEX_TO);
					break;
				default:
					jj_la1[16] = jj_gen;
					break;
				}
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case RANGEEX_GOOP:
					goop2 = jj_consume_token(RANGEEX_GOOP);
					break;
				case RANGEEX_QUOTED:
					goop2 = jj_consume_token(RANGEEX_QUOTED);
					break;
				default:
					jj_la1[17] = jj_gen;
					jj_consume_token(-1);
					throw new ParseException();
				}
				jj_consume_token(RANGEEX_END);
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case CARAT:
					jj_consume_token(CARAT);
					boost = jj_consume_token(NUMBER);
					break;
				default:
					jj_la1[18] = jj_gen;
					break;
				}
				if (goop1.kind == RANGEEX_QUOTED)
					goop1.image = goop1.image.Substring(1, goop1.image.Length-2);
				if (goop2.kind == RANGEEX_QUOTED)
					goop2.image = goop2.image.Substring(1, goop2.image.Length-2);

				q = GetRangeQuery(field, analyzer, goop1.image, goop2.image, false);
				break;
			case QUOTED:
				term = jj_consume_token(QUOTED);
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case SLOP:
					slop = jj_consume_token(SLOP);
					break;
				default:
					jj_la1[19] = jj_gen;
					break;
				}
				switch ((_jj_ntk == -1) ? jj_ntk() : _jj_ntk)
				{
				case CARAT:
					jj_consume_token(CARAT);
					boost = jj_consume_token(NUMBER);
					break;
				default:
					jj_la1[20] = jj_gen;
					break;
				}
				q = GetFieldQuery(field, analyzer,
					term.image.Substring(1, term.image.Length-2));
				if (slop != null && q is PhraseQuery)
				{
					try
					{
						int s = (int) Single.Parse(slop.image.Substring(1), NumberFormatInfo.InvariantInfo);
						((PhraseQuery) q).SetSlop(s);
					}
					catch (Exception) { }
				}
				break;
			default:
				jj_la1[21] = jj_gen;
				jj_consume_token(-1);
				throw new ParseException();
			}
			if (boost != null)
			{
				float f = (float) 1.0;
				try
				{
					f = Single.Parse(boost.image, NumberFormatInfo.InvariantInfo);
				}
				catch (Exception)
				{
					/* Should this be handled somehow? (defaults to "no boost", if
					 * boost number is invalid)
					 */
				}

				// avoid boosting null queries, such as those caused by stop words
				if (q != null)
				{
					q.SetBoost(f);
				}
			}
			{if (true) return q;}
		}

		private bool jj_2_1(int xla)
		{
			jj_la = xla; jj_lastpos = jj_scanpos = token;
			try { return !jj_3_1(); }
			catch (LookaheadSuccess) { return true; }
			finally { jj_save(0, xla); }
		}

		private bool jj_3_1()
		{
			if (jj_scan_token(TERM)) return true;
			if (jj_scan_token(COLON)) return true;
			return false;
		}

		public QueryParserTokenManager token_source;
		public Token token, jj_nt;
		private int _jj_ntk;
		private Token jj_scanpos, jj_lastpos;
		private int jj_la;
		public bool lookingAhead = false;
		//private bool jj_semLA;
		private int jj_gen;
		readonly private int[] jj_la1 = new int[22];
		static private uint[] _jj_la1_0;
		static private int[] _jj_la1_1;

		static QueryParser()
		{
			jj_la1_0();
			jj_la1_1();
		}

		private static void jj_la1_0()
		{
			_jj_la1_0 = new uint[] {0x180,0x180,0xe00,0xe00,0x1f31f80,0x8000,0x1f31000,0x1320000,0x40000,0x40000,0x8000,0x18000000,0x2000000,0x18000000,0x8000,0x80000000,0x20000000,0x80000000,0x8000,0x80000,0x8000,0x1f30000,};
		}

		private static void jj_la1_1()
		{
			_jj_la1_1 = new int[] {0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x1,0x0,0x0,0x0,0x0,};
		}

		readonly private JJCalls[] jj_2_rtns = new JJCalls[1];
		private bool jj_rescan = false;
		private int jj_gc = 0;

		public QueryParser(CharStream stream)
		{
			token_source = new QueryParserTokenManager(stream);
			token = new Token();
			_jj_ntk = -1;
			jj_gen = 0;
			for (int i = 0; i < 22; i++) jj_la1[i] = -1;
			for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
		}

		public void ReInit(CharStream stream)
		{
			token_source.ReInit(stream);
			token = new Token();
			_jj_ntk = -1;
			jj_gen = 0;
			for (int i = 0; i < 22; i++) jj_la1[i] = -1;
			for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
		}

		public QueryParser(QueryParserTokenManager tm)
		{
			token_source = tm;
			token = new Token();
			_jj_ntk = -1;
			jj_gen = 0;
			for (int i = 0; i < 22; i++) jj_la1[i] = -1;
			for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
		}

		public void ReInit(QueryParserTokenManager tm)
		{
			token_source = tm;
			token = new Token();
			_jj_ntk = -1;
			jj_gen = 0;
			for (int i = 0; i < 22; i++) jj_la1[i] = -1;
			for (int i = 0; i < jj_2_rtns.Length; i++) jj_2_rtns[i] = new JJCalls();
		}

		private Token jj_consume_token(int kind)
		{
			Token oldToken = null;
			if ((oldToken = token).next != null) token = token.next;
			else token = token.next = token_source.getNextToken();
			_jj_ntk = -1;
			if (token.kind == kind)
			{
				jj_gen++;
				if (++jj_gc > 100)
				{
					jj_gc = 0;
					for (int i = 0; i < jj_2_rtns.Length; i++)
					{
						JJCalls c = jj_2_rtns[i];
						while (c != null)
						{
							if (c.gen < jj_gen) c.first = null;
							c = c.next;
						}
					}
				}
				return token;
			}
			token = oldToken;
			jj_kind = kind;
			throw GenerateParseException();
		}

		private sealed class LookaheadSuccess : Exception { }
		private LookaheadSuccess jj_ls = new LookaheadSuccess();

		private bool jj_scan_token(int kind)
		{
			if (jj_scanpos == jj_lastpos)
			{
				jj_la--;
				if (jj_scanpos.next == null)
				{
					jj_lastpos = jj_scanpos = jj_scanpos.next = token_source.getNextToken();
				}
				else
				{
					jj_lastpos = jj_scanpos = jj_scanpos.next;
				}
			}
			else
			{
				jj_scanpos = jj_scanpos.next;
			}
			if (jj_rescan)
			{
				int i = 0; Token tok = token;
				while (tok != null && tok != jj_scanpos) { i++; tok = tok.next; }
				if (tok != null) jj_add_error_token(kind, i);
			}
			return (jj_scanpos.kind != kind);
		}

		public Token GetNextToken()
		{
			if (token.next != null) token = token.next;
			else token = token.next = token_source.getNextToken();
			_jj_ntk = -1;
			jj_gen++;
			return token;
		}

		public Token GetToken(int index)
		{
			Token t = lookingAhead ? jj_scanpos : token;
			for (int i = 0; i < index; i++)
			{
				if (t.next != null) t = t.next;
				else t = t.next = token_source.getNextToken();
			}
			return t;
		}

		private int jj_ntk()
		{
			jj_nt = token.next;
			if (jj_nt == null)
			{
				token.next = token_source.getNextToken();
				return (_jj_ntk = token.next.kind);
			}
			else
				return (_jj_ntk = jj_nt.kind);
		}

		private ArrayList jj_expentries = new ArrayList();
		private int[] jj_expentry;
		private int jj_kind = -1;
		private int[] jj_lasttokens = new int[100];
		private int jj_endpos;

		private void jj_add_error_token(int kind, int pos)
		{
			if (pos >= 100) return;
			if (pos == jj_endpos + 1)
			{
				jj_lasttokens[jj_endpos++] = kind;
			}
			else if (jj_endpos != 0)
			{
				jj_expentry = new int[jj_endpos];
				for (int i = 0; i < jj_endpos; i++)
				{
					jj_expentry[i] = jj_lasttokens[i];
				}
				bool exists = false;
				foreach (int[] oldentry in jj_expentries)
				{
					if (oldentry.Length == jj_expentry.Length)
					{
						exists = true;
						for (int i = 0; i < jj_expentry.Length; i++)
						{
							if (oldentry[i] != jj_expentry[i])
							{
								exists = false;
								break;
							}
						}
						if (exists) break;
					}
				}
				if (!exists) jj_expentries.Add(jj_expentry);
				if (pos != 0) jj_lasttokens[(jj_endpos = pos) - 1] = kind;
			}
		}

		public ParseException GenerateParseException()
		{
			jj_expentries.Clear();
			bool[] la1tokens = new bool[33];
			for (int i = 0; i < 33; i++)
			{
				la1tokens[i] = false;
			}
			if (jj_kind >= 0)
			{
				la1tokens[jj_kind] = true;
				jj_kind = -1;
			}
			for (int i = 0; i < 22; i++)
			{
				if (jj_la1[i] == jj_gen)
				{
					for (int j = 0; j < 32; j++)
					{
						if ((_jj_la1_0[i] & (1<<j)) != 0)
						{
							la1tokens[j] = true;
						}
						if ((_jj_la1_1[i] & (1<<j)) != 0)
						{
							la1tokens[32+j] = true;
						}
					}
				}
			}
			for (int i = 0; i < 33; i++)
			{
				if (la1tokens[i])
				{
					jj_expentry = new int[1];
					jj_expentry[0] = i;
					jj_expentries.Add(jj_expentry);
				}
			}
			jj_endpos = 0;
			jj_rescan_token();
			jj_add_error_token(0, 0);
			int[][] exptokseq = new int[jj_expentries.Count][];
			for (int i = 0; i < jj_expentries.Count; i++)
			{
				exptokseq[i] = (int[]) jj_expentries[i];
			}
			return new ParseException(token, exptokseq, tokenImage);
		}

		public void enable_tracing()
		{
		}

		public void disable_tracing()
		{
		}

		private void jj_rescan_token()
		{
			jj_rescan = true;
			for (int i = 0; i < 1; i++)
			{
				JJCalls p = jj_2_rtns[i];
				do
				{
					if (p.gen > jj_gen)
					{
						jj_la = p.arg; jj_lastpos = jj_scanpos = p.first;
						switch (i)
						{
						case 0: jj_3_1(); break;
						}
					}
					p = p.next;
				} while (p != null);
			}
			jj_rescan = false;
		}

		private void jj_save(int index, int xla)
		{
			JJCalls p = jj_2_rtns[index];
			while (p.gen > jj_gen)
			{
				if (p.next == null) { p = p.next = new JJCalls(); break; }
				p = p.next;
			}
			p.gen = jj_gen + xla - jj_la; p.first = token; p.arg = xla;
		}

		class JJCalls
		{
			internal int gen;
			internal Token first;
			internal int arg;
			internal JJCalls next;
		}
	}
}