1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-table.hh"
34 #include "hb-set-private.hh"
37 namespace OT {
40 #ifndef HB_DEBUG_CLOSURE
41 #define HB_DEBUG_CLOSURE (HB_DEBUG+0)
42 #endif
44 #define TRACE_CLOSURE(this) \
45 hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
46 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
47 "");
49 struct hb_closure_context_t
51 inline const char *get_name (void) { return "CLOSURE"; }
52 static const unsigned int max_debug_depth = HB_DEBUG_CLOSURE;
53 typedef hb_void_t return_t;
54 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
55 template <typename T, typename F>
56 inline bool may_dispatch (const T *obj, const F *format) { return true; }
57 template <typename T>
58 inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
59 static return_t default_return_value (void) { return HB_VOID; }
60 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
61 return_t recurse (unsigned int lookup_index)
63 if (unlikely (nesting_level_left == 0 || !recurse_func))
64 return default_return_value ();
66 nesting_level_left--;
67 recurse_func (this, lookup_index);
68 nesting_level_left++;
69 return HB_VOID;
72 hb_face_t *face;
73 hb_set_t *glyphs;
74 recurse_func_t recurse_func;
75 unsigned int nesting_level_left;
76 unsigned int debug_depth;
78 hb_closure_context_t (hb_face_t *face_,
79 hb_set_t *glyphs_,
80 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
81 face (face_),
82 glyphs (glyphs_),
83 recurse_func (NULL),
84 nesting_level_left (nesting_level_left_),
85 debug_depth (0) {}
87 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
92 #ifndef HB_DEBUG_WOULD_APPLY
93 #define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
94 #endif
96 #define TRACE_WOULD_APPLY(this) \
97 hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
98 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
99 "%d glyphs", c->len);
101 struct hb_would_apply_context_t
103 inline const char *get_name (void) { return "WOULD_APPLY"; }
104 static const unsigned int max_debug_depth = HB_DEBUG_WOULD_APPLY;
105 typedef bool return_t;
106 template <typename T, typename F>
107 inline bool may_dispatch (const T *obj, const F *format) { return true; }
108 template <typename T>
109 inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
110 static return_t default_return_value (void) { return false; }
111 bool stop_sublookup_iteration (return_t r) const { return r; }
113 hb_face_t *face;
114 const hb_codepoint_t *glyphs;
115 unsigned int len;
116 bool zero_context;
117 unsigned int debug_depth;
119 hb_would_apply_context_t (hb_face_t *face_,
120 const hb_codepoint_t *glyphs_,
121 unsigned int len_,
122 bool zero_context_) :
123 face (face_),
124 glyphs (glyphs_),
125 len (len_),
126 zero_context (zero_context_),
127 debug_depth (0) {}
132 #ifndef HB_DEBUG_COLLECT_GLYPHS
133 #define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
134 #endif
136 #define TRACE_COLLECT_GLYPHS(this) \
137 hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
138 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
139 "");
141 struct hb_collect_glyphs_context_t
143 inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
144 static const unsigned int max_debug_depth = HB_DEBUG_COLLECT_GLYPHS;
145 typedef hb_void_t return_t;
146 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
147 template <typename T, typename F>
148 inline bool may_dispatch (const T *obj, const F *format) { return true; }
149 template <typename T>
150 inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
151 static return_t default_return_value (void) { return HB_VOID; }
152 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
153 return_t recurse (unsigned int lookup_index)
155 if (unlikely (nesting_level_left == 0 || !recurse_func))
156 return default_return_value ();
158 /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
159 * past the previous check. For GSUB, we only want to collect the output
160 * glyphs in the recursion. If output is not requested, we can go home now.
162 * Note further that the above is not exactly correct. A recursed lookup
163 * is allowed to match input that is not matched in the context, but that's
164 * not how most fonts are built. It's possible to relax that and recurse
165 * with all sets here if it proves to be an issue.
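/* Illustrative note: the pointer swap below is how "collect only output
 * glyphs" is implemented.  Before recursing, before/input/after are all
 * pointed at the shared empty set, so anything the recursed lookup adds
 * to them is discarded; only additions to 'output' survive.  The
 * original pointers are restored as soon as the recursion returns. */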
168 if (output == hb_set_get_empty ())
169 return HB_VOID;
171 /* Return if new lookup was recursed to before. */
172 if (recursed_lookups.has (lookup_index))
173 return HB_VOID;
175 hb_set_t *old_before = before;
176 hb_set_t *old_input = input;
177 hb_set_t *old_after = after;
178 before = input = after = hb_set_get_empty ();
180 nesting_level_left--;
181 recurse_func (this, lookup_index);
182 nesting_level_left++;
184 before = old_before;
185 input = old_input;
186 after = old_after;
188 recursed_lookups.add (lookup_index);
190 return HB_VOID;
193 hb_face_t *face;
194 hb_set_t *before;
195 hb_set_t *input;
196 hb_set_t *after;
197 hb_set_t *output;
198 recurse_func_t recurse_func;
199 hb_set_t recursed_lookups;
200 unsigned int nesting_level_left;
201 unsigned int debug_depth;
203 hb_collect_glyphs_context_t (hb_face_t *face_,
204 hb_set_t *glyphs_before, /* OUT. May be NULL */
205 hb_set_t *glyphs_input, /* OUT. May be NULL */
206 hb_set_t *glyphs_after, /* OUT. May be NULL */
207 hb_set_t *glyphs_output, /* OUT. May be NULL */
208 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
209 face (face_),
210 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
211 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
212 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
213 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
214 recurse_func (NULL),
215 recursed_lookups (),
216 nesting_level_left (nesting_level_left_),
217 debug_depth (0)
219 recursed_lookups.init ();
221 ~hb_collect_glyphs_context_t (void)
223 recursed_lookups.fini ();
226 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
231 #ifndef HB_DEBUG_GET_COVERAGE
232 #define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0)
233 #endif
235 template <typename set_t>
236 struct hb_add_coverage_context_t
238 inline const char *get_name (void) { return "GET_COVERAGE"; }
239 static const unsigned int max_debug_depth = HB_DEBUG_GET_COVERAGE;
240 typedef const Coverage &return_t;
241 template <typename T, typename F>
242 inline bool may_dispatch (const T *obj, const F *format) { return true; }
243 template <typename T>
244 inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
245 static return_t default_return_value (void) { return Null(Coverage); }
246 bool stop_sublookup_iteration (return_t r) const
248 r.add_coverage (set);
249 return false;
252 hb_add_coverage_context_t (set_t *set_) :
253 set (set_),
254 debug_depth (0) {}
256 set_t *set;
257 unsigned int debug_depth;
262 #ifndef HB_DEBUG_APPLY
263 #define HB_DEBUG_APPLY (HB_DEBUG+0)
264 #endif
266 #define TRACE_APPLY(this) \
267 hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
268 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
269 "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
271 struct hb_apply_context_t
273 struct matcher_t
275 inline matcher_t (void) :
276 lookup_props (0),
277 ignore_zwnj (false),
278 ignore_zwj (false),
279 mask (-1),
280 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
281 syllable arg1(0),
282 #undef arg1
283 match_func (NULL),
284 match_data (NULL) {};
286 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
288 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
289 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
290 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
291 inline void set_mask (hb_mask_t mask_) { mask = mask_; }
292 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
293 inline void set_match_func (match_func_t match_func_,
294 const void *match_data_)
295 { match_func = match_func_; match_data = match_data_; }
297 enum may_match_t {
298 MATCH_NO,
299 MATCH_YES,
300 MATCH_MAYBE
303 inline may_match_t may_match (const hb_glyph_info_t &info,
304 const USHORT *glyph_data) const
306 if (!(info.mask & mask) ||
307 (syllable && syllable != info.syllable ()))
308 return MATCH_NO;
310 if (match_func)
311 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
313 return MATCH_MAYBE;
316 enum may_skip_t {
317 SKIP_NO,
318 SKIP_YES,
319 SKIP_MAYBE
322 inline may_skip_t
323 may_skip (const hb_apply_context_t *c,
324 const hb_glyph_info_t &info) const
326 if (!c->check_glyph_property (&info, lookup_props))
327 return SKIP_YES;
329 if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
330 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
331 (ignore_zwj || !_hb_glyph_info_is_zwj (&info)) &&
332 !_hb_glyph_info_ligated (&info)))
333 return SKIP_MAYBE;
335 return SKIP_NO;
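/* How the two predicates above combine in skipping_iterator_t::next()
 * and prev() further down:
 *   SKIP_YES                 -> the glyph is skipped unconditionally;
 *   MATCH_YES                -> the glyph is taken as the next match;
 *   MATCH_MAYBE + SKIP_NO    -> taken as a match (no match_func set,
 *                               e.g. when merely skipping ignorables);
 *   otherwise, SKIP_NO       -> the iteration fails with a mismatch. */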
338 protected:
339 unsigned int lookup_props;
340 bool ignore_zwnj;
341 bool ignore_zwj;
342 hb_mask_t mask;
343 uint8_t syllable;
344 match_func_t match_func;
345 const void *match_data;
348 struct skipping_iterator_t
350 inline void init (hb_apply_context_t *c_, bool context_match = false)
352 c = c_;
353 match_glyph_data = NULL;
354 matcher.set_match_func (NULL, NULL);
355 matcher.set_lookup_props (c->lookup_props);
356 /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
357 matcher.set_ignore_zwnj (context_match || c->table_index == 1);
358 /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
359 matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
360 matcher.set_mask (context_match ? -1 : c->lookup_mask);
362 inline void set_lookup_props (unsigned int lookup_props)
364 matcher.set_lookup_props (lookup_props);
366 inline void set_match_func (matcher_t::match_func_t match_func,
367 const void *match_data,
368 const USHORT glyph_data[])
370 matcher.set_match_func (match_func, match_data);
371 match_glyph_data = glyph_data;
374 inline void reset (unsigned int start_index_,
375 unsigned int num_items_)
377 idx = start_index_;
378 num_items = num_items_;
379 end = c->buffer->len;
380 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
383 inline void reject (void) { num_items++; match_glyph_data--; }
385 inline bool next (void)
387 assert (num_items > 0);
388 while (idx + num_items < end)
390 idx++;
391 const hb_glyph_info_t &info = c->buffer->info[idx];
393 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
394 if (unlikely (skip == matcher_t::SKIP_YES))
395 continue;
397 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
398 if (match == matcher_t::MATCH_YES ||
399 (match == matcher_t::MATCH_MAYBE &&
400 skip == matcher_t::SKIP_NO))
402 num_items--;
403 match_glyph_data++;
404 return true;
407 if (skip == matcher_t::SKIP_NO)
408 return false;
410 return false;
412 inline bool prev (void)
414 assert (num_items > 0);
415 while (idx >= num_items)
417 idx--;
418 const hb_glyph_info_t &info = c->buffer->out_info[idx];
420 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
421 if (unlikely (skip == matcher_t::SKIP_YES))
422 continue;
424 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
425 if (match == matcher_t::MATCH_YES ||
426 (match == matcher_t::MATCH_MAYBE &&
427 skip == matcher_t::SKIP_NO))
429 num_items--;
430 match_glyph_data++;
431 return true;
434 if (skip == matcher_t::SKIP_NO)
435 return false;
437 return false;
440 unsigned int idx;
441 protected:
442 hb_apply_context_t *c;
443 matcher_t matcher;
444 const USHORT *match_glyph_data;
446 unsigned int num_items;
447 unsigned int end;
451 inline const char *get_name (void) { return "APPLY"; }
452 static const unsigned int max_debug_depth = HB_DEBUG_APPLY;
453 typedef bool return_t;
454 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
455 template <typename T, typename F>
456 inline bool may_dispatch (const T *obj, const F *format) { return true; }
457 template <typename T>
458 inline return_t dispatch (const T &obj) { return obj.apply (this); }
459 static return_t default_return_value (void) { return false; }
460 bool stop_sublookup_iteration (return_t r) const { return r; }
461 return_t recurse (unsigned int lookup_index)
463 if (unlikely (nesting_level_left == 0 || !recurse_func))
464 return default_return_value ();
466 nesting_level_left--;
467 bool ret = recurse_func (this, lookup_index);
468 nesting_level_left++;
469 return ret;
472 unsigned int table_index; /* GSUB/GPOS */
473 hb_font_t *font;
474 hb_face_t *face;
475 hb_buffer_t *buffer;
476 hb_direction_t direction;
477 hb_mask_t lookup_mask;
478 bool auto_zwj;
479 recurse_func_t recurse_func;
480 unsigned int nesting_level_left;
481 unsigned int lookup_props;
482 const GDEF &gdef;
483 bool has_glyph_classes;
484 skipping_iterator_t iter_input, iter_context;
485 unsigned int debug_depth;
488 hb_apply_context_t (unsigned int table_index_,
489 hb_font_t *font_,
490 hb_buffer_t *buffer_) :
491 table_index (table_index_),
492 font (font_), face (font->face), buffer (buffer_),
493 direction (buffer_->props.direction),
494 lookup_mask (1),
495 auto_zwj (true),
496 recurse_func (NULL),
497 nesting_level_left (MAX_NESTING_LEVEL),
498 lookup_props (0),
499 gdef (*hb_ot_layout_from_face (face)->gdef),
500 has_glyph_classes (gdef.has_glyph_classes ()),
501 iter_input (),
502 iter_context (),
503 debug_depth (0) {}
505 inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
506 inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
507 inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
508 inline void set_lookup (const Lookup &l) { set_lookup_props (l.get_props ()); }
509 inline void set_lookup_props (unsigned int lookup_props_)
511 lookup_props = lookup_props_;
512 iter_input.init (this, false);
513 iter_context.init (this, true);
516 inline bool
517 match_properties_mark (hb_codepoint_t glyph,
518 unsigned int glyph_props,
519 unsigned int lookup_props) const
521 /* If using mark filtering sets, the high short of
522 * lookup_props has the set index.
524 if (lookup_props & LookupFlag::UseMarkFilteringSet)
525 return gdef.mark_set_covers (lookup_props >> 16, glyph);
527 /* The second byte of lookup_props has the meaning
528 * "ignore marks of attachment type different than
529 * the attachment type specified."
531 if (lookup_props & LookupFlag::MarkAttachmentType)
532 return (lookup_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
534 return true;
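/* Example, using the OpenType LookupFlag bit assignments: if the lookup
 * sets UseMarkFilteringSet (0x0010), the set index carried in the high
 * sixteen bits of lookup_props (hence the >> 16 above) selects a GDEF
 * mark glyph set and only marks in that set are matched.  Otherwise, if
 * the MarkAttachmentType byte is, say, 2 (flag word 0x0200), only marks
 * whose GDEF mark attachment class is 2 pass the comparison above. */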
537 inline bool
538 check_glyph_property (const hb_glyph_info_t *info,
539 unsigned int lookup_props) const
541 hb_codepoint_t glyph = info->codepoint;
542 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
544 /* Not covered, if, for example, glyph class is ligature and
545 * lookup_props includes LookupFlag::IgnoreLigatures
547 if (glyph_props & lookup_props & LookupFlag::IgnoreFlags)
548 return false;
550 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
551 return match_properties_mark (glyph, glyph_props, lookup_props);
553 return true;
556 inline void _set_glyph_props (hb_codepoint_t glyph_index,
557 unsigned int class_guess = 0,
558 bool ligature = false,
559 bool component = false) const
561 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
562 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
563 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
564 if (ligature)
566 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
567 /* In the only place that the MULTIPLIED bit is used, Uniscribe
568 * seems to only care about the "last" transformation between
569 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
570 * and ligate again, it forgives the multiplication and acts as
571 * if only ligation happened. As such, clear MULTIPLIED bit.
573 add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
575 if (component)
576 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
577 if (likely (has_glyph_classes))
578 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
579 else if (class_guess)
580 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
583 inline void replace_glyph (hb_codepoint_t glyph_index) const
585 _set_glyph_props (glyph_index);
586 buffer->replace_glyph (glyph_index);
588 inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
590 _set_glyph_props (glyph_index);
591 buffer->cur().codepoint = glyph_index;
593 inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
594 unsigned int class_guess) const
596 _set_glyph_props (glyph_index, class_guess, true);
597 buffer->replace_glyph (glyph_index);
599 inline void output_glyph_for_component (hb_codepoint_t glyph_index,
600 unsigned int class_guess) const
602 _set_glyph_props (glyph_index, class_guess, false, true);
603 buffer->output_glyph (glyph_index);
609 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
610 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
611 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
613 struct ContextClosureFuncs
615 intersects_func_t intersects;
617 struct ContextCollectGlyphsFuncs
619 collect_glyphs_func_t collect;
621 struct ContextApplyFuncs
623 match_func_t match;
627 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
629 return glyphs->has (value);
631 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
633 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
634 return class_def.intersects_class (glyphs, value);
636 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
638 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
639 return (data+coverage).intersects (glyphs);
642 static inline bool intersects_array (hb_closure_context_t *c,
643 unsigned int count,
644 const USHORT values[],
645 intersects_func_t intersects_func,
646 const void *intersects_data)
648 for (unsigned int i = 0; i < count; i++)
649 if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
650 return false;
651 return true;
655 static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
657 glyphs->add (value);
659 static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
661 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
662 class_def.add_class (glyphs, value);
664 static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
666 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
667 (data+coverage).add_coverage (glyphs);
669 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
670 hb_set_t *glyphs,
671 unsigned int count,
672 const USHORT values[],
673 collect_glyphs_func_t collect_func,
674 const void *collect_data)
676 for (unsigned int i = 0; i < count; i++)
677 collect_func (glyphs, values[i], collect_data);
681 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
683 return glyph_id == value;
685 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
687 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
688 return class_def.get_class (glyph_id) == value;
690 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
692 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
693 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
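/* These three matchers line up with the three context lookup formats
 * below: format 1 rules store glyph IDs (match_glyph), format 2 rules
 * store class values resolved through a ClassDef (match_class), and
 * format 3 stores offsets to Coverage tables (match_coverage); 'data'
 * carries the ClassDef or the base the coverage offsets are relative to. */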
696 static inline bool would_match_input (hb_would_apply_context_t *c,
697 unsigned int count, /* Including the first glyph (not matched) */
698 const USHORT input[], /* Array of input values--start with second glyph */
699 match_func_t match_func,
700 const void *match_data)
702 if (count != c->len)
703 return false;
705 for (unsigned int i = 1; i < count; i++)
706 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
707 return false;
709 return true;
711 static inline bool match_input (hb_apply_context_t *c,
712 unsigned int count, /* Including the first glyph (not matched) */
713 const USHORT input[], /* Array of input values--start with second glyph */
714 match_func_t match_func,
715 const void *match_data,
716 unsigned int *end_offset,
717 unsigned int match_positions[MAX_CONTEXT_LENGTH],
718 bool *p_is_mark_ligature = NULL,
719 unsigned int *p_total_component_count = NULL)
721 TRACE_APPLY (NULL);
723 if (unlikely (count > MAX_CONTEXT_LENGTH)) return TRACE_RETURN (false);
725 hb_buffer_t *buffer = c->buffer;
727 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
728 skippy_iter.reset (buffer->idx, count - 1);
729 skippy_iter.set_match_func (match_func, match_data, input);
732 * This is perhaps the trickiest part of OpenType... Remarks:
734 * - If all components of the ligature were marks, we call this a mark ligature.
736 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
737 * it as a ligature glyph.
739 * - Ligatures cannot be formed across glyphs attached to different components
740 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
741 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
742 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
743 * There is an exception to this: If a ligature tries ligating with marks that
744 * belong to it itself, go ahead, assuming that the font designer knows what
745 * they are doing (otherwise it can break Indic stuff when a matra wants to
746 * ligate with a conjunct...)
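/* A concrete reading of the checks in the loop below: if the first
 * matched glyph has lig_id=1, lig_comp=2 (it is attached to component 2
 * of an earlier ligature), every further matched glyph must carry the
 * same lig_id=1, lig_comp=2, otherwise we would be ligating across
 * component boundaries and the match is rejected.  If instead the first
 * glyph is not attached to a previous ligature component, later glyphs
 * may only carry a nonzero lig_id/lig_comp if that lig_id equals the
 * first glyph's own id, i.e. they are marks attached to the first glyph
 * itself (the exception described above). */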
749 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());
751 unsigned int total_component_count = 0;
752 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
754 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
755 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
757 match_positions[0] = buffer->idx;
758 for (unsigned int i = 1; i < count; i++)
760 if (!skippy_iter.next ()) return TRACE_RETURN (false);
762 match_positions[i] = skippy_iter.idx;
764 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
765 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
767 if (first_lig_id && first_lig_comp) {
768 /* If first component was attached to a previous ligature component,
769 * all subsequent components should be attached to the same ligature
770 * component, otherwise we shouldn't ligate them. */
771 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
772 return TRACE_RETURN (false);
773 } else {
774 /* If first component was NOT attached to a previous ligature component,
775 * all subsequent components should also NOT be attached to any ligature
776 * component, unless they are attached to the first component itself! */
777 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
778 return TRACE_RETURN (false);
781 is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
782 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
785 *end_offset = skippy_iter.idx - buffer->idx + 1;
787 if (p_is_mark_ligature)
788 *p_is_mark_ligature = is_mark_ligature;
790 if (p_total_component_count)
791 *p_total_component_count = total_component_count;
793 return TRACE_RETURN (true);
795 static inline void ligate_input (hb_apply_context_t *c,
796 unsigned int count, /* Including the first glyph */
797 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
798 unsigned int match_length,
799 hb_codepoint_t lig_glyph,
800 bool is_mark_ligature,
801 unsigned int total_component_count)
803 TRACE_APPLY (NULL);
805 hb_buffer_t *buffer = c->buffer;
807 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
810 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
811 * the ligature to keep its old ligature id. This will allow it to attach to
812 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
813 * and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
814 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
815 * later, we don't want them to lose their ligature id/component, otherwise
816 * GPOS will fail to correctly position the mark ligature on top of the
817 * LAM,LAM,HEH ligature. See:
818 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
820 * - If a ligature is formed of components, some of which are themselves
821 * ligatures, and those ligature components had marks attached to *their*
822 * components, we have to attach the marks to the new ligature component
823 * positions! Now *that*'s tricky! And these marks may be following the
824 * last component of the whole sequence, so we should loop forward looking
825 * for them and update them.
827 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
828 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
829 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
830 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
831 * the new ligature with a component value of 2.
833 * This in fact happened to a font... See:
834 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
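/* Worked example of the renumbering below, continuing the
 * LAM,LAM,SHADDA,FATHA,HEH case above: after 'calt' the buffer holds
 * LAM, LAM.HEH (lig_id=1, 2 components), SHADDA (lig_id=1, comp=1),
 * FATHA (lig_id=1, comp=1).  When 'liga' then ligates LAM + LAM.HEH,
 * total_component_count = 1 + 2 = 3, and for the trailing marks
 *   new_lig_comp = components_so_far - last_num_components
 *                + MIN (MAX (lig_comp, 1), last_num_components)
 *                = 3 - 2 + 1 = 2,
 * so SHADDA and FATHA end up attached to component 2 of the new
 * three-component ligature, as described above. */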
837 unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
838 unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
839 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
840 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
841 unsigned int components_so_far = last_num_components;
843 if (!is_mark_ligature)
845 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
846 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
848 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
849 _hb_glyph_info_set_modified_combining_class (&buffer->cur(), 0);
852 c->replace_glyph_with_ligature (lig_glyph, klass);
854 for (unsigned int i = 1; i < count; i++)
856 while (buffer->idx < match_positions[i])
858 if (!is_mark_ligature) {
859 unsigned int new_lig_comp = components_so_far - last_num_components +
860 MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->cur()), 1u), last_num_components);
861 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
863 buffer->next_glyph ();
866 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
867 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
868 components_so_far += last_num_components;
870 /* Skip the base glyph */
871 buffer->idx++;
874 if (!is_mark_ligature && last_lig_id) {
875 /* Re-adjust components for any marks following. */
876 for (unsigned int i = buffer->idx; i < buffer->len; i++) {
877 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
878 unsigned int new_lig_comp = components_so_far - last_num_components +
879 MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->info[i]), 1u), last_num_components);
880 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
881 } else
882 break;
885 TRACE_RETURN (true);
888 static inline bool match_backtrack (hb_apply_context_t *c,
889 unsigned int count,
890 const USHORT backtrack[],
891 match_func_t match_func,
892 const void *match_data)
894 TRACE_APPLY (NULL);
896 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
897 skippy_iter.reset (c->buffer->backtrack_len (), count);
898 skippy_iter.set_match_func (match_func, match_data, backtrack);
900 for (unsigned int i = 0; i < count; i++)
901 if (!skippy_iter.prev ())
902 return TRACE_RETURN (false);
904 return TRACE_RETURN (true);
907 static inline bool match_lookahead (hb_apply_context_t *c,
908 unsigned int count,
909 const USHORT lookahead[],
910 match_func_t match_func,
911 const void *match_data,
912 unsigned int offset)
914 TRACE_APPLY (NULL);
916 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
917 skippy_iter.reset (c->buffer->idx + offset - 1, count);
918 skippy_iter.set_match_func (match_func, match_data, lookahead);
920 for (unsigned int i = 0; i < count; i++)
921 if (!skippy_iter.next ())
922 return TRACE_RETURN (false);
924 return TRACE_RETURN (true);
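/* Note on 'offset': callers pass the length of the just-matched input
 * run (match_length), so the iterator is reset to
 * buffer->idx + offset - 1 and the first next() lands on the first
 * glyph after the matched sequence; each lookahead value is then
 * matched in turn, skipping ignorables the same way as the input is. */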
929 struct LookupRecord
931 inline bool sanitize (hb_sanitize_context_t *c) const
933 TRACE_SANITIZE (this);
934 return TRACE_RETURN (c->check_struct (this));
937 USHORT sequenceIndex; /* Index into current glyph
938 * sequence--first glyph = 0 */
939 USHORT lookupListIndex; /* Lookup to apply to that
940 * position--zero-based */
941 public:
942 DEFINE_SIZE_STATIC (4);
946 template <typename context_t>
947 static inline void recurse_lookups (context_t *c,
948 unsigned int lookupCount,
949 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
951 for (unsigned int i = 0; i < lookupCount; i++)
952 c->recurse (lookupRecord[i].lookupListIndex);
955 static inline bool apply_lookup (hb_apply_context_t *c,
956 unsigned int count, /* Including the first glyph */
957 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
958 unsigned int lookupCount,
959 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
960 unsigned int match_length)
962 TRACE_APPLY (NULL);
964 hb_buffer_t *buffer = c->buffer;
965 unsigned int end;
967 /* All positions are distance from beginning of *output* buffer.
968 * Adjust. */
970 unsigned int bl = buffer->backtrack_len ();
971 end = bl + match_length;
973 int delta = bl - buffer->idx;
974 /* Convert positions to new indexing. */
975 for (unsigned int j = 0; j < count; j++)
976 match_positions[j] += delta;
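/* E.g. if five glyphs have already been copied to the out-buffer
 * (backtrack_len () == 5) and the match started at buffer->idx == 3,
 * then delta == 2 and a recorded match position of 4 becomes 6, its
 * index counted from the beginning of the output buffer. */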
979 for (unsigned int i = 0; i < lookupCount; i++)
981 unsigned int idx = lookupRecord[i].sequenceIndex;
982 if (idx >= count)
983 continue;
985 buffer->move_to (match_positions[idx]);
987 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
988 if (!c->recurse (lookupRecord[i].lookupListIndex))
989 continue;
991 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
992 int delta = new_len - orig_len;
994 if (!delta)
995 continue;
997 /* Recursed lookup changed buffer len. Adjust. */
999 /* end can't go back past the current match position.
1000 * Note: this is only true because we do NOT allow MultipleSubst
1001 * with zero sequence len. */
1002 end = MAX ((int) match_positions[idx] + 1, int (end) + delta);
1004 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1006 if (delta > 0)
1008 if (unlikely (delta + count > MAX_CONTEXT_LENGTH))
1009 break;
1011 else
1013 /* NOTE: delta is negative. */
1014 delta = MAX (delta, (int) next - (int) count);
1015 next -= delta;
1018 /* Shift! */
1019 memmove (match_positions + next + delta, match_positions + next,
1020 (count - next) * sizeof (match_positions[0]));
1021 next += delta;
1022 count += delta;
1024 /* Fill in new entries. */
1025 for (unsigned int j = idx + 1; j < next; j++)
1026 match_positions[j] = match_positions[j - 1] + 1;
1028 /* And fixup the rest. */
1029 for (; next < count; next++)
1030 match_positions[next] += delta;
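/* E.g. if the recursed lookup expanded the glyph at match_positions[idx]
 * into three glyphs (a MultipleSubst), delta == +2: 'end' grows by two,
 * the positions after 'idx' are shifted two slots to the right, the two
 * newly created slots are filled with consecutive positions, and every
 * later match position is bumped by two.  For a negative delta (e.g. a
 * ligature) the window shrinks instead, with 'end' clamped so it never
 * moves back past the position just substituted. */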
1033 buffer->move_to (end);
1035 return TRACE_RETURN (true);
1040 /* Contextual lookups */
1042 struct ContextClosureLookupContext
1044 ContextClosureFuncs funcs;
1045 const void *intersects_data;
1048 struct ContextCollectGlyphsLookupContext
1050 ContextCollectGlyphsFuncs funcs;
1051 const void *collect_data;
1054 struct ContextApplyLookupContext
1056 ContextApplyFuncs funcs;
1057 const void *match_data;
1060 static inline void context_closure_lookup (hb_closure_context_t *c,
1061 unsigned int inputCount, /* Including the first glyph (not matched) */
1062 const USHORT input[], /* Array of input values--start with second glyph */
1063 unsigned int lookupCount,
1064 const LookupRecord lookupRecord[],
1065 ContextClosureLookupContext &lookup_context)
1067 if (intersects_array (c,
1068 inputCount ? inputCount - 1 : 0, input,
1069 lookup_context.funcs.intersects, lookup_context.intersects_data))
1070 recurse_lookups (c,
1071 lookupCount, lookupRecord);
1074 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1075 unsigned int inputCount, /* Including the first glyph (not matched) */
1076 const USHORT input[], /* Array of input values--start with second glyph */
1077 unsigned int lookupCount,
1078 const LookupRecord lookupRecord[],
1079 ContextCollectGlyphsLookupContext &lookup_context)
1081 collect_array (c, c->input,
1082 inputCount ? inputCount - 1 : 0, input,
1083 lookup_context.funcs.collect, lookup_context.collect_data);
1084 recurse_lookups (c,
1085 lookupCount, lookupRecord);
1088 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1089 unsigned int inputCount, /* Including the first glyph (not matched) */
1090 const USHORT input[], /* Array of input values--start with second glyph */
1091 unsigned int lookupCount HB_UNUSED,
1092 const LookupRecord lookupRecord[] HB_UNUSED,
1093 ContextApplyLookupContext &lookup_context)
1095 return would_match_input (c,
1096 inputCount, input,
1097 lookup_context.funcs.match, lookup_context.match_data);
1099 static inline bool context_apply_lookup (hb_apply_context_t *c,
1100 unsigned int inputCount, /* Including the first glyph (not matched) */
1101 const USHORT input[], /* Array of input values--start with second glyph */
1102 unsigned int lookupCount,
1103 const LookupRecord lookupRecord[],
1104 ContextApplyLookupContext &lookup_context)
1106 unsigned int match_length = 0;
1107 unsigned int match_positions[MAX_CONTEXT_LENGTH];
1108 return match_input (c,
1109 inputCount, input,
1110 lookup_context.funcs.match, lookup_context.match_data,
1111 &match_length, match_positions)
1112 && apply_lookup (c,
1113 inputCount, match_positions,
1114 lookupCount, lookupRecord,
1115 match_length);
1118 struct Rule
1120 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1122 TRACE_CLOSURE (this);
1123 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1124 context_closure_lookup (c,
1125 inputCount, inputZ,
1126 lookupCount, lookupRecord,
1127 lookup_context);
1130 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
1132 TRACE_COLLECT_GLYPHS (this);
1133 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1134 context_collect_glyphs_lookup (c,
1135 inputCount, inputZ,
1136 lookupCount, lookupRecord,
1137 lookup_context);
1140 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1142 TRACE_WOULD_APPLY (this);
1143 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1144 return TRACE_RETURN (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
1147 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1149 TRACE_APPLY (this);
1150 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1151 return TRACE_RETURN (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
1154 public:
1155 inline bool sanitize (hb_sanitize_context_t *c) const
1157 TRACE_SANITIZE (this);
1158 return inputCount.sanitize (c)
1159 && lookupCount.sanitize (c)
1160 && c->check_range (inputZ,
1161 inputZ[0].static_size * inputCount
1162 + lookupRecordX[0].static_size * lookupCount);
1165 protected:
1166 USHORT inputCount; /* Total number of glyphs in input
1167 * glyph sequence--includes the first
1168 * glyph */
1169 USHORT lookupCount; /* Number of LookupRecords */
1170 USHORT inputZ[VAR]; /* Array of match inputs--start with
1171 * second glyph */
1172 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
1173 * design order */
1174 public:
1175 DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
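/* Layout note: a Rule is inputCount, lookupCount, then (inputCount - 1)
 * input values, immediately followed by lookupCount LookupRecords.
 * There is no stored offset to the LookupRecord array, which is why the
 * methods above locate it with
 *   StructAtOffset<LookupRecord> (inputZ,
 *     inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)). */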
1178 struct RuleSet
1180 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1182 TRACE_CLOSURE (this);
1183 unsigned int num_rules = rule.len;
1184 for (unsigned int i = 0; i < num_rules; i++)
1185 (this+rule[i]).closure (c, lookup_context);
1188 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
1190 TRACE_COLLECT_GLYPHS (this);
1191 unsigned int num_rules = rule.len;
1192 for (unsigned int i = 0; i < num_rules; i++)
1193 (this+rule[i]).collect_glyphs (c, lookup_context);
1196 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1198 TRACE_WOULD_APPLY (this);
1199 unsigned int num_rules = rule.len;
1200 for (unsigned int i = 0; i < num_rules; i++)
1202 if ((this+rule[i]).would_apply (c, lookup_context))
1203 return TRACE_RETURN (true);
1205 return TRACE_RETURN (false);
1208 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1210 TRACE_APPLY (this);
1211 unsigned int num_rules = rule.len;
1212 for (unsigned int i = 0; i < num_rules; i++)
1214 if ((this+rule[i]).apply (c, lookup_context))
1215 return TRACE_RETURN (true);
1217 return TRACE_RETURN (false);
1220 inline bool sanitize (hb_sanitize_context_t *c) const
1222 TRACE_SANITIZE (this);
1223 return TRACE_RETURN (rule.sanitize (c, this));
1226 protected:
1227 OffsetArrayOf<Rule>
1228 rule; /* Array of Rule tables
1229 * ordered by preference */
1230 public:
1231 DEFINE_SIZE_ARRAY (2, rule);
1235 struct ContextFormat1
1237 inline void closure (hb_closure_context_t *c) const
1239 TRACE_CLOSURE (this);
1241 const Coverage &cov = (this+coverage);
1243 struct ContextClosureLookupContext lookup_context = {
1244 {intersects_glyph},
1245 NULL
1248 unsigned int count = ruleSet.len;
1249 for (unsigned int i = 0; i < count; i++)
1250 if (cov.intersects_coverage (c->glyphs, i)) {
1251 const RuleSet &rule_set = this+ruleSet[i];
1252 rule_set.closure (c, lookup_context);
1256 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1258 TRACE_COLLECT_GLYPHS (this);
1259 (this+coverage).add_coverage (c->input);
1261 struct ContextCollectGlyphsLookupContext lookup_context = {
1262 {collect_glyph},
1263 NULL
1266 unsigned int count = ruleSet.len;
1267 for (unsigned int i = 0; i < count; i++)
1268 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1271 inline bool would_apply (hb_would_apply_context_t *c) const
1273 TRACE_WOULD_APPLY (this);
1275 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1276 struct ContextApplyLookupContext lookup_context = {
1277 {match_glyph},
1278 NULL
1280 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1283 inline const Coverage &get_coverage (void) const
1285 return this+coverage;
1288 inline bool apply (hb_apply_context_t *c) const
1290 TRACE_APPLY (this);
1291 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1292 if (likely (index == NOT_COVERED))
1293 return TRACE_RETURN (false);
1295 const RuleSet &rule_set = this+ruleSet[index];
1296 struct ContextApplyLookupContext lookup_context = {
1297 {match_glyph},
1298 NULL
1300 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1303 inline bool sanitize (hb_sanitize_context_t *c) const
1305 TRACE_SANITIZE (this);
1306 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1309 protected:
1310 USHORT format; /* Format identifier--format = 1 */
1311 OffsetTo<Coverage>
1312 coverage; /* Offset to Coverage table--from
1313 * beginning of table */
1314 OffsetArrayOf<RuleSet>
1315 ruleSet; /* Array of RuleSet tables
1316 * ordered by Coverage Index */
1317 public:
1318 DEFINE_SIZE_ARRAY (6, ruleSet);
1322 struct ContextFormat2
1324 inline void closure (hb_closure_context_t *c) const
1326 TRACE_CLOSURE (this);
1327 if (!(this+coverage).intersects (c->glyphs))
1328 return;
1330 const ClassDef &class_def = this+classDef;
1332 struct ContextClosureLookupContext lookup_context = {
1333 {intersects_class},
1334 &class_def
1337 unsigned int count = ruleSet.len;
1338 for (unsigned int i = 0; i < count; i++)
1339 if (class_def.intersects_class (c->glyphs, i)) {
1340 const RuleSet &rule_set = this+ruleSet[i];
1341 rule_set.closure (c, lookup_context);
1345 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1347 TRACE_COLLECT_GLYPHS (this);
1348 (this+coverage).add_coverage (c->input);
1350 const ClassDef &class_def = this+classDef;
1351 struct ContextCollectGlyphsLookupContext lookup_context = {
1352 {collect_class},
1353 &class_def
1356 unsigned int count = ruleSet.len;
1357 for (unsigned int i = 0; i < count; i++)
1358 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1361 inline bool would_apply (hb_would_apply_context_t *c) const
1363 TRACE_WOULD_APPLY (this);
1365 const ClassDef &class_def = this+classDef;
1366 unsigned int index = class_def.get_class (c->glyphs[0]);
1367 const RuleSet &rule_set = this+ruleSet[index];
1368 struct ContextApplyLookupContext lookup_context = {
1369 {match_class},
1370 &class_def
1372 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1375 inline const Coverage &get_coverage (void) const
1377 return this+coverage;
1380 inline bool apply (hb_apply_context_t *c) const
1382 TRACE_APPLY (this);
1383 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1384 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1386 const ClassDef &class_def = this+classDef;
1387 index = class_def.get_class (c->buffer->cur().codepoint);
1388 const RuleSet &rule_set = this+ruleSet[index];
1389 struct ContextApplyLookupContext lookup_context = {
1390 {match_class},
1391 &class_def
1393 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1396 inline bool sanitize (hb_sanitize_context_t *c) const
1398 TRACE_SANITIZE (this);
1399 return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
1402 protected:
1403 USHORT format; /* Format identifier--format = 2 */
1404 OffsetTo<Coverage>
1405 coverage; /* Offset to Coverage table--from
1406 * beginning of table */
1407 OffsetTo<ClassDef>
1408 classDef; /* Offset to glyph ClassDef table--from
1409 * beginning of table */
1410 OffsetArrayOf<RuleSet>
1411 ruleSet; /* Array of RuleSet tables
1412 * ordered by class */
1413 public:
1414 DEFINE_SIZE_ARRAY (8, ruleSet);
1418 struct ContextFormat3
1420 inline void closure (hb_closure_context_t *c) const
1422 TRACE_CLOSURE (this);
1423 if (!(this+coverageZ[0]).intersects (c->glyphs))
1424 return;
1426 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1427 struct ContextClosureLookupContext lookup_context = {
1428 {intersects_coverage},
1429 this
1431 context_closure_lookup (c,
1432 glyphCount, (const USHORT *) (coverageZ + 1),
1433 lookupCount, lookupRecord,
1434 lookup_context);
1437 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1439 TRACE_COLLECT_GLYPHS (this);
1440 (this+coverageZ[0]).add_coverage (c->input);
1442 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1443 struct ContextCollectGlyphsLookupContext lookup_context = {
1444 {collect_coverage},
1445 this
1448 context_collect_glyphs_lookup (c,
1449 glyphCount, (const USHORT *) (coverageZ + 1),
1450 lookupCount, lookupRecord,
1451 lookup_context);
1454 inline bool would_apply (hb_would_apply_context_t *c) const
1456 TRACE_WOULD_APPLY (this);
1458 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1459 struct ContextApplyLookupContext lookup_context = {
1460 {match_coverage},
1461 this
1463 return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
1466 inline const Coverage &get_coverage (void) const
1468 return this+coverageZ[0];
1471 inline bool apply (hb_apply_context_t *c) const
1473 TRACE_APPLY (this);
1474 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
1475 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1477 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
1478 struct ContextApplyLookupContext lookup_context = {
1479 {match_coverage},
1480 this
1482 return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
1485 inline bool sanitize (hb_sanitize_context_t *c) const
1487 TRACE_SANITIZE (this);
1488 if (!c->check_struct (this)) return TRACE_RETURN (false);
1489 unsigned int count = glyphCount;
1490 if (!count) return TRACE_RETURN (false); /* We want to access coverageZ[0] freely. */
1491 if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return TRACE_RETURN (false);
1492 for (unsigned int i = 0; i < count; i++)
1493 if (!coverageZ[i].sanitize (c, this)) return TRACE_RETURN (false);
1494 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
1495 return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
1498 protected:
1499 USHORT format; /* Format identifier--format = 3 */
1500 USHORT glyphCount; /* Number of glyphs in the input glyph
1501 * sequence */
1502 USHORT lookupCount; /* Number of LookupRecords */
1503 OffsetTo<Coverage>
1504 coverageZ[VAR]; /* Array of offsets to Coverage
1505 * table in glyph sequence order */
1506 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
1507 * design order */
1508 public:
1509 DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
1512 struct Context
1514 template <typename context_t>
1515 inline typename context_t::return_t dispatch (context_t *c) const
1517 TRACE_DISPATCH (this, u.format);
1518 if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
1519 switch (u.format) {
1520 case 1: return TRACE_RETURN (c->dispatch (u.format1));
1521 case 2: return TRACE_RETURN (c->dispatch (u.format2));
1522 case 3: return TRACE_RETURN (c->dispatch (u.format3));
1523 default:return TRACE_RETURN (c->default_return_value ());
1527 protected:
1528 union {
1529 USHORT format; /* Format identifier */
1530 ContextFormat1 format1;
1531 ContextFormat2 format2;
1532 ContextFormat3 format3;
1533 } u;
1537 /* Chaining Contextual lookups */
1539 struct ChainContextClosureLookupContext
1541 ContextClosureFuncs funcs;
1542 const void *intersects_data[3];
1545 struct ChainContextCollectGlyphsLookupContext
1547 ContextCollectGlyphsFuncs funcs;
1548 const void *collect_data[3];
1551 struct ChainContextApplyLookupContext
1553 ContextApplyFuncs funcs;
1554 const void *match_data[3];
1557 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
1558 unsigned int backtrackCount,
1559 const USHORT backtrack[],
1560 unsigned int inputCount, /* Including the first glyph (not matched) */
1561 const USHORT input[], /* Array of input values--start with second glyph */
1562 unsigned int lookaheadCount,
1563 const USHORT lookahead[],
1564 unsigned int lookupCount,
1565 const LookupRecord lookupRecord[],
1566 ChainContextClosureLookupContext &lookup_context)
1568 if (intersects_array (c,
1569 backtrackCount, backtrack,
1570 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
1571 && intersects_array (c,
1572 inputCount ? inputCount - 1 : 0, input,
1573 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
1574 && intersects_array (c,
1575 lookaheadCount, lookahead,
1576 lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
1577 recurse_lookups (c,
1578 lookupCount, lookupRecord);
1581 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1582 unsigned int backtrackCount,
1583 const USHORT backtrack[],
1584 unsigned int inputCount, /* Including the first glyph (not matched) */
1585 const USHORT input[], /* Array of input values--start with second glyph */
1586 unsigned int lookaheadCount,
1587 const USHORT lookahead[],
1588 unsigned int lookupCount,
1589 const LookupRecord lookupRecord[],
1590 ChainContextCollectGlyphsLookupContext &lookup_context)
1592 collect_array (c, c->before,
1593 backtrackCount, backtrack,
1594 lookup_context.funcs.collect, lookup_context.collect_data[0]);
1595 collect_array (c, c->input,
1596 inputCount ? inputCount - 1 : 0, input,
1597 lookup_context.funcs.collect, lookup_context.collect_data[1]);
1598 collect_array (c, c->after,
1599 lookaheadCount, lookahead,
1600 lookup_context.funcs.collect, lookup_context.collect_data[2]);
1601 recurse_lookups (c,
1602 lookupCount, lookupRecord);
1605 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
1606 unsigned int backtrackCount,
1607 const USHORT backtrack[] HB_UNUSED,
1608 unsigned int inputCount, /* Including the first glyph (not matched) */
1609 const USHORT input[], /* Array of input values--start with second glyph */
1610 unsigned int lookaheadCount,
1611 const USHORT lookahead[] HB_UNUSED,
1612 unsigned int lookupCount HB_UNUSED,
1613 const LookupRecord lookupRecord[] HB_UNUSED,
1614 ChainContextApplyLookupContext &lookup_context)
1616 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
1617 && would_match_input (c,
1618 inputCount, input,
1619 lookup_context.funcs.match, lookup_context.match_data[1]);
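/* Note: would_apply() only sees the input glyph sequence, so when the
 * caller sets zero_context, chain rules that additionally require
 * backtrack or lookahead context are rejected here rather than
 * guessed at. */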
1622 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
1623 unsigned int backtrackCount,
1624 const USHORT backtrack[],
1625 unsigned int inputCount, /* Including the first glyph (not matched) */
1626 const USHORT input[], /* Array of input values--start with second glyph */
1627 unsigned int lookaheadCount,
1628 const USHORT lookahead[],
1629 unsigned int lookupCount,
1630 const LookupRecord lookupRecord[],
1631 ChainContextApplyLookupContext &lookup_context)
1633 unsigned int match_length = 0;
1634 unsigned int match_positions[MAX_CONTEXT_LENGTH];
1635 return match_input (c,
1636 inputCount, input,
1637 lookup_context.funcs.match, lookup_context.match_data[1],
1638 &match_length, match_positions)
1639 && match_backtrack (c,
1640 backtrackCount, backtrack,
1641 lookup_context.funcs.match, lookup_context.match_data[0])
1642 && match_lookahead (c,
1643 lookaheadCount, lookahead,
1644 lookup_context.funcs.match, lookup_context.match_data[2],
1645 match_length)
1646 && apply_lookup (c,
1647 inputCount, match_positions,
1648 lookupCount, lookupRecord,
1649 match_length);
1652 struct ChainRule
1654 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1656 TRACE_CLOSURE (this);
1657 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1658 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1659 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1660 chain_context_closure_lookup (c,
1661 backtrack.len, backtrack.array,
1662 input.len, input.array,
1663 lookahead.len, lookahead.array,
1664 lookup.len, lookup.array,
1665 lookup_context);
1668 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
1670 TRACE_COLLECT_GLYPHS (this);
1671 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1672 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1673 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1674 chain_context_collect_glyphs_lookup (c,
1675 backtrack.len, backtrack.array,
1676 input.len, input.array,
1677 lookahead.len, lookahead.array,
1678 lookup.len, lookup.array,
1679 lookup_context);
1682 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1684 TRACE_WOULD_APPLY (this);
1685 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1686 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1687 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1688 return TRACE_RETURN (chain_context_would_apply_lookup (c,
1689 backtrack.len, backtrack.array,
1690 input.len, input.array,
1691 lookahead.len, lookahead.array, lookup.len,
1692 lookup.array, lookup_context));
1695 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1697 TRACE_APPLY (this);
1698 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1699 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1700 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1701 return TRACE_RETURN (chain_context_apply_lookup (c,
1702 backtrack.len, backtrack.array,
1703 input.len, input.array,
1704 lookahead.len, lookahead.array, lookup.len,
1705 lookup.array, lookup_context));
1708 inline bool sanitize (hb_sanitize_context_t *c) const
1710 TRACE_SANITIZE (this);
1711 if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
1712 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1713 if (!input.sanitize (c)) return TRACE_RETURN (false);
1714 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1715 if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
1716 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1717 return TRACE_RETURN (lookup.sanitize (c));
1720 protected:
1721 ArrayOf<USHORT>
1722 backtrack; /* Array of backtracking values
1723 * (to be matched before the input
1724 * sequence) */
1725 HeadlessArrayOf<USHORT>
1726 inputX; /* Array of input values (start with
1727 * second glyph) */
1728 ArrayOf<USHORT>
1729 lookaheadX; /* Array of lookahead values (to be
1730 * matched after the input sequence) */
1731 ArrayOf<LookupRecord>
1732 lookupX; /* Array of LookupRecords--in
1733 * design order */
1734 public:
1735 DEFINE_SIZE_MIN (8);
1736 };
1738 struct ChainRuleSet
1739 {
1740 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1741 {
1742 TRACE_CLOSURE (this);
1743 unsigned int num_rules = rule.len;
1744 for (unsigned int i = 0; i < num_rules; i++)
1745 (this+rule[i]).closure (c, lookup_context);
1746 }
1748 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
1749 {
1750 TRACE_COLLECT_GLYPHS (this);
1751 unsigned int num_rules = rule.len;
1752 for (unsigned int i = 0; i < num_rules; i++)
1753 (this+rule[i]).collect_glyphs (c, lookup_context);
1754 }
1756 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1757 {
1758 TRACE_WOULD_APPLY (this);
1759 unsigned int num_rules = rule.len;
1760 for (unsigned int i = 0; i < num_rules; i++)
1761 if ((this+rule[i]).would_apply (c, lookup_context))
1762 return TRACE_RETURN (true);
1764 return TRACE_RETURN (false);
1765 }
1767 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1768 {
1769 TRACE_APPLY (this);
1770 unsigned int num_rules = rule.len;
1771 for (unsigned int i = 0; i < num_rules; i++)
1772 if ((this+rule[i]).apply (c, lookup_context))
1773 return TRACE_RETURN (true);
1775 return TRACE_RETURN (false);
1776 }
1778 inline bool sanitize (hb_sanitize_context_t *c) const
1779 {
1780 TRACE_SANITIZE (this);
1781 return TRACE_RETURN (rule.sanitize (c, this));
1782 }
1784 protected:
1785 OffsetArrayOf<ChainRule>
1786 rule; /* Array of ChainRule tables
1787 * ordered by preference */
1788 public:
1789 DEFINE_SIZE_ARRAY (2, rule);
1790 };
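/* ChainContextFormat1: chained context lookup keyed by literal glyph ids.
 * The Coverage table selects a ChainRuleSet for the first input glyph, and the
 * rules in that set are then tried in order with match_glyph.  A rough sketch
 * of the apply path, where `g` stands for the glyph at the buffer cursor (the
 * other names are the ones used below):
 *
 *   unsigned int index = (this+coverage).get_coverage (g);  // NOT_COVERED => no match
 *   const ChainRuleSet &rule_set = this+ruleSet[index];
 *   rule_set.apply (c, lookup_context);                     // first rule that applies wins
 */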
1792 struct ChainContextFormat1
1793 {
1794 inline void closure (hb_closure_context_t *c) const
1795 {
1796 TRACE_CLOSURE (this);
1797 const Coverage &cov = (this+coverage);
1799 struct ChainContextClosureLookupContext lookup_context = {
1800 {intersects_glyph},
1801 {NULL, NULL, NULL}
1802 };
1804 unsigned int count = ruleSet.len;
1805 for (unsigned int i = 0; i < count; i++)
1806 if (cov.intersects_coverage (c->glyphs, i)) {
1807 const ChainRuleSet &rule_set = this+ruleSet[i];
1808 rule_set.closure (c, lookup_context);
1809 }
1810 }
1812 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1813 {
1814 TRACE_COLLECT_GLYPHS (this);
1815 (this+coverage).add_coverage (c->input);
1817 struct ChainContextCollectGlyphsLookupContext lookup_context = {
1818 {collect_glyph},
1819 {NULL, NULL, NULL}
1820 };
1822 unsigned int count = ruleSet.len;
1823 for (unsigned int i = 0; i < count; i++)
1824 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1825 }
1827 inline bool would_apply (hb_would_apply_context_t *c) const
1828 {
1829 TRACE_WOULD_APPLY (this);
1831 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1832 struct ChainContextApplyLookupContext lookup_context = {
1833 {match_glyph},
1834 {NULL, NULL, NULL}
1835 };
1836 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1837 }
1839 inline const Coverage &get_coverage (void) const
1840 {
1841 return this+coverage;
1842 }
1844 inline bool apply (hb_apply_context_t *c) const
1845 {
1846 TRACE_APPLY (this);
1847 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1848 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1850 const ChainRuleSet &rule_set = this+ruleSet[index];
1851 struct ChainContextApplyLookupContext lookup_context = {
1852 {match_glyph},
1853 {NULL, NULL, NULL}
1854 };
1855 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1856 }
1858 inline bool sanitize (hb_sanitize_context_t *c) const
1859 {
1860 TRACE_SANITIZE (this);
1861 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1862 }
1864 protected:
1865 USHORT format; /* Format identifier--format = 1 */
1866 OffsetTo<Coverage>
1867 coverage; /* Offset to Coverage table--from
1868 * beginning of table */
1869 OffsetArrayOf<ChainRuleSet>
1870 ruleSet; /* Array of ChainRuleSet tables
1871 * ordered by Coverage Index */
1872 public:
1873 DEFINE_SIZE_ARRAY (6, ruleSet);
1874 };
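/* ChainContextFormat2: chained context lookup keyed by glyph classes.
 * Coverage only gates the first input glyph; the rules themselves are written
 * in terms of three ClassDef tables (backtrack / input / lookahead), and the
 * ruleSet array is indexed by the *input class* of the current glyph rather
 * than by its Coverage index.  That is why apply() below looks the glyph up
 * twice, roughly:
 *
 *   if ((this+coverage).get_coverage (g) == NOT_COVERED) return false;        // gate
 *   const ChainRuleSet &rule_set = this+ruleSet[input_class_def.get_class (g)];
 */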
1876 struct ChainContextFormat2
1877 {
1878 inline void closure (hb_closure_context_t *c) const
1879 {
1880 TRACE_CLOSURE (this);
1881 if (!(this+coverage).intersects (c->glyphs))
1882 return;
1884 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1885 const ClassDef &input_class_def = this+inputClassDef;
1886 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1888 struct ChainContextClosureLookupContext lookup_context = {
1889 {intersects_class},
1890 {&backtrack_class_def,
1891 &input_class_def,
1892 &lookahead_class_def}
1893 };
1895 unsigned int count = ruleSet.len;
1896 for (unsigned int i = 0; i < count; i++)
1897 if (input_class_def.intersects_class (c->glyphs, i)) {
1898 const ChainRuleSet &rule_set = this+ruleSet[i];
1899 rule_set.closure (c, lookup_context);
1900 }
1901 }
1903 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1904 {
1905 TRACE_COLLECT_GLYPHS (this);
1906 (this+coverage).add_coverage (c->input);
1908 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1909 const ClassDef &input_class_def = this+inputClassDef;
1910 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1912 struct ChainContextCollectGlyphsLookupContext lookup_context = {
1913 {collect_class},
1914 {&backtrack_class_def,
1915 &input_class_def,
1916 &lookahead_class_def}
1917 };
1919 unsigned int count = ruleSet.len;
1920 for (unsigned int i = 0; i < count; i++)
1921 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1922 }
1924 inline bool would_apply (hb_would_apply_context_t *c) const
1925 {
1926 TRACE_WOULD_APPLY (this);
1928 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1929 const ClassDef &input_class_def = this+inputClassDef;
1930 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1932 unsigned int index = input_class_def.get_class (c->glyphs[0]);
1933 const ChainRuleSet &rule_set = this+ruleSet[index];
1934 struct ChainContextApplyLookupContext lookup_context = {
1935 {match_class},
1936 {&backtrack_class_def,
1937 &input_class_def,
1938 &lookahead_class_def}
1939 };
1940 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1941 }
1943 inline const Coverage &get_coverage (void) const
1944 {
1945 return this+coverage;
1946 }
1948 inline bool apply (hb_apply_context_t *c) const
1949 {
1950 TRACE_APPLY (this);
1951 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1952 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1954 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1955 const ClassDef &input_class_def = this+inputClassDef;
1956 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1958 index = input_class_def.get_class (c->buffer->cur().codepoint);
1959 const ChainRuleSet &rule_set = this+ruleSet[index];
1960 struct ChainContextApplyLookupContext lookup_context = {
1961 {match_class},
1962 {&backtrack_class_def,
1963 &input_class_def,
1964 &lookahead_class_def}
1965 };
1966 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1967 }
1969 inline bool sanitize (hb_sanitize_context_t *c) const
1970 {
1971 TRACE_SANITIZE (this);
1972 return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
1973 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
1974 ruleSet.sanitize (c, this));
1975 }
1977 protected:
1978 USHORT format; /* Format identifier--format = 2 */
1979 OffsetTo<Coverage>
1980 coverage; /* Offset to Coverage table--from
1981 * beginning of table */
1982 OffsetTo<ClassDef>
1983 backtrackClassDef; /* Offset to glyph ClassDef table
1984 * containing backtrack sequence
1985 * data--from beginning of table */
1986 OffsetTo<ClassDef>
1987 inputClassDef; /* Offset to glyph ClassDef
1988 * table containing input sequence
1989 * data--from beginning of table */
1990 OffsetTo<ClassDef>
1991 lookaheadClassDef; /* Offset to glyph ClassDef table
1992 * containing lookahead sequence
1993 * data--from beginning of table */
1994 OffsetArrayOf<ChainRuleSet>
1995 ruleSet; /* Array of ChainRuleSet tables
1996 * ordered by class */
1997 public:
1998 DEFINE_SIZE_ARRAY (12, ruleSet);
1999 };
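/* ChainContextFormat3: a single chained rule expressed directly as arrays of
 * Coverage tables, one per backtrack, input and lookahead position, with no
 * Rule/RuleSet indirection.  The subtable's own coverage is the first input
 * Coverage, which is why get_coverage() below returns this+input[0].  The
 * `(const USHORT *) input.array + 1` casts pass the input array to the shared
 * chain_context_*_lookup helpers without its first element, since position 0
 * has already been matched through the Coverage gate.
 */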
2001 struct ChainContextFormat3
2002 {
2003 inline void closure (hb_closure_context_t *c) const
2004 {
2005 TRACE_CLOSURE (this);
2006 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2008 if (!(this+input[0]).intersects (c->glyphs))
2009 return;
2011 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2012 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2013 struct ChainContextClosureLookupContext lookup_context = {
2014 {intersects_coverage},
2015 {this, this, this}
2016 };
2017 chain_context_closure_lookup (c,
2018 backtrack.len, (const USHORT *) backtrack.array,
2019 input.len, (const USHORT *) input.array + 1,
2020 lookahead.len, (const USHORT *) lookahead.array,
2021 lookup.len, lookup.array,
2022 lookup_context);
2023 }
2025 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
2026 {
2027 TRACE_COLLECT_GLYPHS (this);
2028 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2030 (this+input[0]).add_coverage (c->input);
2032 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2033 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2034 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2035 {collect_coverage},
2036 {this, this, this}
2037 };
2038 chain_context_collect_glyphs_lookup (c,
2039 backtrack.len, (const USHORT *) backtrack.array,
2040 input.len, (const USHORT *) input.array + 1,
2041 lookahead.len, (const USHORT *) lookahead.array,
2042 lookup.len, lookup.array,
2043 lookup_context);
2044 }
2046 inline bool would_apply (hb_would_apply_context_t *c) const
2047 {
2048 TRACE_WOULD_APPLY (this);
2050 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2051 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2052 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2053 struct ChainContextApplyLookupContext lookup_context = {
2054 {match_coverage},
2055 {this, this, this}
2056 };
2057 return TRACE_RETURN (chain_context_would_apply_lookup (c,
2058 backtrack.len, (const USHORT *) backtrack.array,
2059 input.len, (const USHORT *) input.array + 1,
2060 lookahead.len, (const USHORT *) lookahead.array,
2061 lookup.len, lookup.array, lookup_context));
2062 }
2064 inline const Coverage &get_coverage (void) const
2065 {
2066 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2067 return this+input[0];
2068 }
2070 inline bool apply (hb_apply_context_t *c) const
2071 {
2072 TRACE_APPLY (this);
2073 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2075 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
2076 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
2078 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2079 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2080 struct ChainContextApplyLookupContext lookup_context = {
2081 {match_coverage},
2082 {this, this, this}
2083 };
2084 return TRACE_RETURN (chain_context_apply_lookup (c,
2085 backtrack.len, (const USHORT *) backtrack.array,
2086 input.len, (const USHORT *) input.array + 1,
2087 lookahead.len, (const USHORT *) lookahead.array,
2088 lookup.len, lookup.array, lookup_context));
2089 }
2091 inline bool sanitize (hb_sanitize_context_t *c) const
2092 {
2093 TRACE_SANITIZE (this);
2094 if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
2095 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
2096 if (!input.sanitize (c, this)) return TRACE_RETURN (false);
2097 if (!input.len) return TRACE_RETURN (false); /* To be consistent with Context. */
2098 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
2099 if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
2100 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
2101 return TRACE_RETURN (lookup.sanitize (c));
2102 }
2104 protected:
2105 USHORT format; /* Format identifier--format = 3 */
2106 OffsetArrayOf<Coverage>
2107 backtrack; /* Array of coverage tables
2108 * in backtracking sequence, in glyph
2109 * sequence order */
2110 OffsetArrayOf<Coverage>
2111 inputX; /* Array of coverage
2112 * tables in input sequence, in glyph
2113 * sequence order */
2114 OffsetArrayOf<Coverage>
2115 lookaheadX; /* Array of coverage tables
2116 * in lookahead sequence, in glyph
2117 * sequence order */
2118 ArrayOf<LookupRecord>
2119 lookupX; /* Array of LookupRecords--in
2120 * design order */
2121 public:
2122 DEFINE_SIZE_MIN (10);
2123 };
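/* ChainContext: the format-dispatching union for chained context lookups
 * (GSUB lookup type 6, GPOS lookup type 8).  dispatch() reads u.format and
 * forwards the whole context object -- closure, collect_glyphs, would_apply,
 * apply, sanitize -- to the matching ChainContextFormatN via c->dispatch(),
 * so each operation above is written once per format rather than once per
 * operation.
 */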
2125 struct ChainContext
2126 {
2127 template <typename context_t>
2128 inline typename context_t::return_t dispatch (context_t *c) const
2129 {
2130 TRACE_DISPATCH (this, u.format);
2131 if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
2132 switch (u.format) {
2133 case 1: return TRACE_RETURN (c->dispatch (u.format1));
2134 case 2: return TRACE_RETURN (c->dispatch (u.format2));
2135 case 3: return TRACE_RETURN (c->dispatch (u.format3));
2136 default:return TRACE_RETURN (c->default_return_value ());
2137 }
2138 }
2140 protected:
2141 union {
2142 USHORT format; /* Format identifier */
2143 ChainContextFormat1 format1;
2144 ChainContextFormat2 format2;
2145 ChainContextFormat3 format3;
2146 } u;
2147 };
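/* Extension lookups (GSUB lookup type 7, GPOS lookup type 9): a thin wrapper
 * that references a subtable of some other lookup type through a 32-bit
 * offset, letting large fonts place subtables beyond the 16-bit offset range
 * of a regular Lookup.  ExtensionFormat1 is templated on the table type
 * (GSUB or GPOS) so it can name the right LookupSubTable union; dispatch()
 * simply forwards to the wrapped subtable with the wrapped lookup type.
 */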
2150 template <typename T>
2151 struct ExtensionFormat1
2152 {
2153 inline unsigned int get_type (void) const { return extensionLookupType; }
2155 template <typename X>
2156 inline const X& get_subtable (void) const
2157 {
2158 unsigned int offset = extensionOffset;
2159 if (unlikely (!offset)) return Null(typename T::LookupSubTable);
2160 return StructAtOffset<typename T::LookupSubTable> (this, offset);
2161 }
2163 template <typename context_t>
2164 inline typename context_t::return_t dispatch (context_t *c) const
2165 {
2166 TRACE_DISPATCH (this, format);
2167 if (unlikely (!c->may_dispatch (this, this))) return TRACE_RETURN (c->default_return_value ());
2168 return get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ());
2169 }
2171 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
2172 inline bool sanitize (hb_sanitize_context_t *c) const
2173 {
2174 TRACE_SANITIZE (this);
2175 return TRACE_RETURN (c->check_struct (this) && extensionOffset != 0);
2176 }
2178 protected:
2179 USHORT format; /* Format identifier. Set to 1. */
2180 USHORT extensionLookupType; /* Lookup type of subtable referenced
2181 * by ExtensionOffset (i.e. the
2182 * extension subtable). */
2183 ULONG extensionOffset; /* Offset to the extension subtable
2184 * (of lookup type extensionLookupType), from start of this subtable. */
2185 public:
2186 DEFINE_SIZE_STATIC (8);
2187 };
2189 template <typename T>
2190 struct Extension
2191 {
2192 inline unsigned int get_type (void) const
2193 {
2194 switch (u.format) {
2195 case 1: return u.format1.get_type ();
2196 default:return 0;
2197 }
2198 }
2199 template <typename X>
2200 inline const X& get_subtable (void) const
2201 {
2202 switch (u.format) {
2203 case 1: return u.format1.template get_subtable<typename T::LookupSubTable> ();
2204 default:return Null(typename T::LookupSubTable);
2205 }
2206 }
2208 template <typename context_t>
2209 inline typename context_t::return_t dispatch (context_t *c) const
2210 {
2211 TRACE_DISPATCH (this, u.format);
2212 if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
2213 switch (u.format) {
2214 case 1: return TRACE_RETURN (u.format1.dispatch (c));
2215 default:return TRACE_RETURN (c->default_return_value ());
2216 }
2217 }
2219 protected:
2220 union {
2221 USHORT format; /* Format identifier */
2222 ExtensionFormat1<T> format1;
2223 } u;
2224 };
2227 /*
2228 * GSUB/GPOS Common
2229 */
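/* GSUBGPOS: the header layout shared by the GSUB and GPOS tables -- a version
 * number plus offsets to the ScriptList, FeatureList and LookupList.  The
 * accessors below are what the higher-level hb-ot-layout code uses to walk
 * the table; a rough usage sketch, where `table` is a GSUBGPOS reference and
 * process_script() is a hypothetical callback:
 *
 *   unsigned int count = table.get_script_count ();
 *   for (unsigned int i = 0; i < count; i++)
 *     process_script (table.get_script_tag (i), table.get_script (i));
 */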
2231 struct GSUBGPOS
2232 {
2233 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
2234 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
2236 inline unsigned int get_script_count (void) const
2237 { return (this+scriptList).len; }
2238 inline const Tag& get_script_tag (unsigned int i) const
2239 { return (this+scriptList).get_tag (i); }
2240 inline unsigned int get_script_tags (unsigned int start_offset,
2241 unsigned int *script_count /* IN/OUT */,
2242 hb_tag_t *script_tags /* OUT */) const
2243 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
2244 inline const Script& get_script (unsigned int i) const
2245 { return (this+scriptList)[i]; }
2246 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
2247 { return (this+scriptList).find_index (tag, index); }
2249 inline unsigned int get_feature_count (void) const
2250 { return (this+featureList).len; }
2251 inline hb_tag_t get_feature_tag (unsigned int i) const
2252 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
2253 inline unsigned int get_feature_tags (unsigned int start_offset,
2254 unsigned int *feature_count /* IN/OUT */,
2255 hb_tag_t *feature_tags /* OUT */) const
2256 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
2257 inline const Feature& get_feature (unsigned int i) const
2258 { return (this+featureList)[i]; }
2259 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
2260 { return (this+featureList).find_index (tag, index); }
2262 inline unsigned int get_lookup_count (void) const
2263 { return (this+lookupList).len; }
2264 inline const Lookup& get_lookup (unsigned int i) const
2265 { return (this+lookupList)[i]; }
2267 inline bool sanitize (hb_sanitize_context_t *c) const
2268 {
2269 TRACE_SANITIZE (this);
2270 return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
2271 scriptList.sanitize (c, this) &&
2272 featureList.sanitize (c, this) &&
2273 lookupList.sanitize (c, this));
2274 }
2276 protected:
2277 FixedVersion version; /* Version of the GSUB/GPOS table--initially set
2278 * to 0x00010000u */
2279 OffsetTo<ScriptList>
2280 scriptList; /* ScriptList table */
2281 OffsetTo<FeatureList>
2282 featureList; /* FeatureList table */
2283 OffsetTo<LookupList>
2284 lookupList; /* LookupList table */
2285 public:
2286 DEFINE_SIZE_STATIC (10);
2287 };
2290 } /* namespace OT */
2293 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */