/* third_party/harfbuzz-ng/src/hb-ot-layout-gsubgpos-private.hh */
/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-table.hh"
34 #include "hb-set-private.hh"
37 namespace OT {
/* Debug level for the glyph-closure pass; defaults to the global HB_DEBUG. */
#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

/* Emits a scoped trace entry for closure() methods when debugging is on. */
#define TRACE_CLOSURE(this) \
	hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");
49 struct hb_closure_context_t
51 inline const char *get_name (void) { return "CLOSURE"; }
52 static const unsigned int max_debug_depth = HB_DEBUG_CLOSURE;
53 typedef hb_void_t return_t;
54 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
55 template <typename T, typename F>
56 inline bool may_dispatch (const T *obj, const F *format) { return true; }
57 template <typename T>
58 inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
59 static return_t default_return_value (void) { return HB_VOID; }
60 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
61 return_t recurse (unsigned int lookup_index)
63 if (unlikely (nesting_level_left == 0 || !recurse_func))
64 return default_return_value ();
66 nesting_level_left--;
67 recurse_func (this, lookup_index);
68 nesting_level_left++;
69 return HB_VOID;
72 hb_face_t *face;
73 hb_set_t *glyphs;
74 recurse_func_t recurse_func;
75 unsigned int nesting_level_left;
76 unsigned int debug_depth;
78 hb_closure_context_t (hb_face_t *face_,
79 hb_set_t *glyphs_,
80 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
81 face (face_),
82 glyphs (glyphs_),
83 recurse_func (NULL),
84 nesting_level_left (nesting_level_left_),
85 debug_depth (0) {}
87 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Debug level for would_apply() queries; defaults to the global HB_DEBUG. */
#ifndef HB_DEBUG_WOULD_APPLY
#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
#endif

/* Emits a scoped trace entry for would_apply() methods when debugging is on. */
#define TRACE_WOULD_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "%d glyphs", c->len);
101 struct hb_would_apply_context_t
103 inline const char *get_name (void) { return "WOULD_APPLY"; }
104 static const unsigned int max_debug_depth = HB_DEBUG_WOULD_APPLY;
105 typedef bool return_t;
106 template <typename T, typename F>
107 inline bool may_dispatch (const T *obj, const F *format) { return true; }
108 template <typename T>
109 inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
110 static return_t default_return_value (void) { return false; }
111 bool stop_sublookup_iteration (return_t r) const { return r; }
113 hb_face_t *face;
114 const hb_codepoint_t *glyphs;
115 unsigned int len;
116 bool zero_context;
117 unsigned int debug_depth;
119 hb_would_apply_context_t (hb_face_t *face_,
120 const hb_codepoint_t *glyphs_,
121 unsigned int len_,
122 bool zero_context_) :
123 face (face_),
124 glyphs (glyphs_),
125 len (len_),
126 zero_context (zero_context_),
127 debug_depth (0) {}
/* Debug level for the glyph-collection pass; defaults to the global HB_DEBUG. */
#ifndef HB_DEBUG_COLLECT_GLYPHS
#define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
#endif

/* Emits a scoped trace entry for collect_glyphs() methods when debugging is on. */
#define TRACE_COLLECT_GLYPHS(this) \
	hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");
141 struct hb_collect_glyphs_context_t
143 inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
144 static const unsigned int max_debug_depth = HB_DEBUG_COLLECT_GLYPHS;
145 typedef hb_void_t return_t;
146 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
147 template <typename T, typename F>
148 inline bool may_dispatch (const T *obj, const F *format) { return true; }
149 template <typename T>
150 inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
151 static return_t default_return_value (void) { return HB_VOID; }
152 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
153 return_t recurse (unsigned int lookup_index)
155 if (unlikely (nesting_level_left == 0 || !recurse_func))
156 return default_return_value ();
158 /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
159 * past the previous check. For GSUB, we only want to collect the output
160 * glyphs in the recursion. If output is not requested, we can go home now.
162 * Note further, that the above is not exactly correct. A recursed lookup
163 * is allowed to match input that is not matched in the context, but that's
164 * not how most fonts are built. It's possible to relax that and recurse
165 * with all sets here if it proves to be an issue.
168 if (output == hb_set_get_empty ())
169 return HB_VOID;
171 /* Return if new lookup was recursed to before. */
172 if (recursed_lookups.has (lookup_index))
173 return HB_VOID;
175 hb_set_t *old_before = before;
176 hb_set_t *old_input = input;
177 hb_set_t *old_after = after;
178 before = input = after = hb_set_get_empty ();
180 nesting_level_left--;
181 recurse_func (this, lookup_index);
182 nesting_level_left++;
184 before = old_before;
185 input = old_input;
186 after = old_after;
188 recursed_lookups.add (lookup_index);
190 return HB_VOID;
193 hb_face_t *face;
194 hb_set_t *before;
195 hb_set_t *input;
196 hb_set_t *after;
197 hb_set_t *output;
198 recurse_func_t recurse_func;
199 hb_set_t recursed_lookups;
200 unsigned int nesting_level_left;
201 unsigned int debug_depth;
203 hb_collect_glyphs_context_t (hb_face_t *face_,
204 hb_set_t *glyphs_before, /* OUT. May be NULL */
205 hb_set_t *glyphs_input, /* OUT. May be NULL */
206 hb_set_t *glyphs_after, /* OUT. May be NULL */
207 hb_set_t *glyphs_output, /* OUT. May be NULL */
208 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
209 face (face_),
210 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
211 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
212 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
213 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
214 recurse_func (NULL),
215 recursed_lookups (),
216 nesting_level_left (nesting_level_left_),
217 debug_depth (0)
219 recursed_lookups.init ();
221 ~hb_collect_glyphs_context_t (void)
223 recursed_lookups.fini ();
226 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
/* Debug level for coverage collection; defaults to the global HB_DEBUG. */
#ifndef HB_DEBUG_GET_COVERAGE
#define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0)
#endif
235 template <typename set_t>
236 struct hb_add_coverage_context_t
238 inline const char *get_name (void) { return "GET_COVERAGE"; }
239 static const unsigned int max_debug_depth = HB_DEBUG_GET_COVERAGE;
240 typedef const Coverage &return_t;
241 template <typename T, typename F>
242 inline bool may_dispatch (const T *obj, const F *format) { return true; }
243 template <typename T>
244 inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
245 static return_t default_return_value (void) { return Null(Coverage); }
246 bool stop_sublookup_iteration (return_t r) const
248 r.add_coverage (set);
249 return false;
252 hb_add_coverage_context_t (set_t *set_) :
253 set (set_),
254 debug_depth (0) {}
256 set_t *set;
257 unsigned int debug_depth;
/* Debug level for lookup application; defaults to the global HB_DEBUG. */
#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

/* Emits a scoped trace entry for apply() methods, logging the current buffer
 * position, glyph id, and lookup index. */
#define TRACE_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "idx %d gid %u lookup %d", \
	 c->buffer->idx, c->buffer->cur().codepoint, (int) c->lookup_index);
272 struct hb_apply_context_t
274 struct matcher_t
276 inline matcher_t (void) :
277 lookup_props (0),
278 ignore_zwnj (false),
279 ignore_zwj (false),
280 mask (-1),
281 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
282 syllable arg1(0),
283 #undef arg1
284 match_func (NULL),
285 match_data (NULL) {};
287 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
289 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
290 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
291 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
292 inline void set_mask (hb_mask_t mask_) { mask = mask_; }
293 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
294 inline void set_match_func (match_func_t match_func_,
295 const void *match_data_)
296 { match_func = match_func_; match_data = match_data_; }
298 enum may_match_t {
299 MATCH_NO,
300 MATCH_YES,
301 MATCH_MAYBE
304 inline may_match_t may_match (const hb_glyph_info_t &info,
305 const USHORT *glyph_data) const
307 if (!(info.mask & mask) ||
308 (syllable && syllable != info.syllable ()))
309 return MATCH_NO;
311 if (match_func)
312 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
314 return MATCH_MAYBE;
317 enum may_skip_t {
318 SKIP_NO,
319 SKIP_YES,
320 SKIP_MAYBE
323 inline may_skip_t
324 may_skip (const hb_apply_context_t *c,
325 const hb_glyph_info_t &info) const
327 if (!c->check_glyph_property (&info, lookup_props))
328 return SKIP_YES;
330 if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
331 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
332 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
333 return SKIP_MAYBE;
335 return SKIP_NO;
338 protected:
339 unsigned int lookup_props;
340 bool ignore_zwnj;
341 bool ignore_zwj;
342 hb_mask_t mask;
343 uint8_t syllable;
344 match_func_t match_func;
345 const void *match_data;
348 struct skipping_iterator_t
350 inline void init (hb_apply_context_t *c_, bool context_match = false)
352 c = c_;
353 match_glyph_data = NULL,
354 matcher.set_match_func (NULL, NULL);
355 matcher.set_lookup_props (c->lookup_props);
356 /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
357 matcher.set_ignore_zwnj (context_match || c->table_index == 1);
358 /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
359 matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
360 matcher.set_mask (context_match ? -1 : c->lookup_mask);
362 inline void set_lookup_props (unsigned int lookup_props)
364 matcher.set_lookup_props (lookup_props);
366 inline void set_match_func (matcher_t::match_func_t match_func,
367 const void *match_data,
368 const USHORT glyph_data[])
370 matcher.set_match_func (match_func, match_data);
371 match_glyph_data = glyph_data;
374 inline void reset (unsigned int start_index_,
375 unsigned int num_items_)
377 idx = start_index_;
378 num_items = num_items_;
379 end = c->buffer->len;
380 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
383 inline void reject (void) { num_items++; match_glyph_data--; }
385 inline bool next (void)
387 assert (num_items > 0);
388 while (idx + num_items < end)
390 idx++;
391 const hb_glyph_info_t &info = c->buffer->info[idx];
393 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
394 if (unlikely (skip == matcher_t::SKIP_YES))
395 continue;
397 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
398 if (match == matcher_t::MATCH_YES ||
399 (match == matcher_t::MATCH_MAYBE &&
400 skip == matcher_t::SKIP_NO))
402 num_items--;
403 match_glyph_data++;
404 return true;
407 if (skip == matcher_t::SKIP_NO)
408 return false;
410 return false;
412 inline bool prev (void)
414 assert (num_items > 0);
415 while (idx >= num_items)
417 idx--;
418 const hb_glyph_info_t &info = c->buffer->out_info[idx];
420 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
421 if (unlikely (skip == matcher_t::SKIP_YES))
422 continue;
424 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
425 if (match == matcher_t::MATCH_YES ||
426 (match == matcher_t::MATCH_MAYBE &&
427 skip == matcher_t::SKIP_NO))
429 num_items--;
430 match_glyph_data++;
431 return true;
434 if (skip == matcher_t::SKIP_NO)
435 return false;
437 return false;
440 unsigned int idx;
441 protected:
442 hb_apply_context_t *c;
443 matcher_t matcher;
444 const USHORT *match_glyph_data;
446 unsigned int num_items;
447 unsigned int end;
451 inline const char *get_name (void) { return "APPLY"; }
452 static const unsigned int max_debug_depth = HB_DEBUG_APPLY;
453 typedef bool return_t;
454 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
455 template <typename T, typename F>
456 inline bool may_dispatch (const T *obj, const F *format) { return true; }
457 template <typename T>
458 inline return_t dispatch (const T &obj) { return obj.apply (this); }
459 static return_t default_return_value (void) { return false; }
460 bool stop_sublookup_iteration (return_t r) const { return r; }
461 return_t recurse (unsigned int lookup_index)
463 if (unlikely (nesting_level_left == 0 || !recurse_func))
464 return default_return_value ();
466 nesting_level_left--;
467 bool ret = recurse_func (this, lookup_index);
468 nesting_level_left++;
469 return ret;
472 unsigned int table_index; /* GSUB/GPOS */
473 hb_font_t *font;
474 hb_face_t *face;
475 hb_buffer_t *buffer;
476 hb_direction_t direction;
477 hb_mask_t lookup_mask;
478 bool auto_zwj;
479 recurse_func_t recurse_func;
480 unsigned int nesting_level_left;
481 unsigned int lookup_props;
482 const GDEF &gdef;
483 bool has_glyph_classes;
484 skipping_iterator_t iter_input, iter_context;
485 unsigned int lookup_index;
486 unsigned int debug_depth;
489 hb_apply_context_t (unsigned int table_index_,
490 hb_font_t *font_,
491 hb_buffer_t *buffer_) :
492 table_index (table_index_),
493 font (font_), face (font->face), buffer (buffer_),
494 direction (buffer_->props.direction),
495 lookup_mask (1),
496 auto_zwj (true),
497 recurse_func (NULL),
498 nesting_level_left (MAX_NESTING_LEVEL),
499 lookup_props (0),
500 gdef (*hb_ot_layout_from_face (face)->gdef),
501 has_glyph_classes (gdef.has_glyph_classes ()),
502 iter_input (),
503 iter_context (),
504 lookup_index ((unsigned int) -1),
505 debug_depth (0) {}
507 inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
508 inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
509 inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
510 inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
511 inline void set_lookup_props (unsigned int lookup_props_)
513 lookup_props = lookup_props_;
514 iter_input.init (this, false);
515 iter_context.init (this, true);
518 inline bool
519 match_properties_mark (hb_codepoint_t glyph,
520 unsigned int glyph_props,
521 unsigned int match_props) const
523 /* If using mark filtering sets, the high short of
524 * match_props has the set index.
526 if (match_props & LookupFlag::UseMarkFilteringSet)
527 return gdef.mark_set_covers (match_props >> 16, glyph);
529 /* The second byte of match_props has the meaning
530 * "ignore marks of attachment type different than
531 * the attachment type specified."
533 if (match_props & LookupFlag::MarkAttachmentType)
534 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
536 return true;
539 inline bool
540 check_glyph_property (const hb_glyph_info_t *info,
541 unsigned int match_props) const
543 hb_codepoint_t glyph = info->codepoint;
544 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
546 /* Not covered, if, for example, glyph class is ligature and
547 * match_props includes LookupFlags::IgnoreLigatures
549 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
550 return false;
552 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
553 return match_properties_mark (glyph, glyph_props, match_props);
555 return true;
558 inline void _set_glyph_props (hb_codepoint_t glyph_index,
559 unsigned int class_guess = 0,
560 bool ligature = false,
561 bool component = false) const
563 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
564 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
565 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
566 if (ligature)
568 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
569 /* In the only place that the MULTIPLIED bit is used, Uniscribe
570 * seems to only care about the "last" transformation between
571 * Ligature and Multiple substitions. Ie. if you ligate, expand,
572 * and ligate again, it forgives the multiplication and acts as
573 * if only ligation happened. As such, clear MULTIPLIED bit.
575 add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
577 if (component)
578 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
579 if (likely (has_glyph_classes))
580 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
581 else if (class_guess)
582 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
585 inline void replace_glyph (hb_codepoint_t glyph_index) const
587 _set_glyph_props (glyph_index);
588 buffer->replace_glyph (glyph_index);
590 inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
592 _set_glyph_props (glyph_index);
593 buffer->cur().codepoint = glyph_index;
595 inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
596 unsigned int class_guess) const
598 _set_glyph_props (glyph_index, class_guess, true);
599 buffer->replace_glyph (glyph_index);
601 inline void output_glyph_for_component (hb_codepoint_t glyph_index,
602 unsigned int class_guess) const
604 _set_glyph_props (glyph_index, class_guess, false, true);
605 buffer->output_glyph (glyph_index);
611 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
612 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
613 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
615 struct ContextClosureFuncs
617 intersects_func_t intersects;
619 struct ContextCollectGlyphsFuncs
621 collect_glyphs_func_t collect;
623 struct ContextApplyFuncs
625 match_func_t match;
629 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
631 return glyphs->has (value);
633 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
635 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
636 return class_def.intersects_class (glyphs, value);
638 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
640 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
641 return (data+coverage).intersects (glyphs);
644 static inline bool intersects_array (hb_closure_context_t *c,
645 unsigned int count,
646 const USHORT values[],
647 intersects_func_t intersects_func,
648 const void *intersects_data)
650 for (unsigned int i = 0; i < count; i++)
651 if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
652 return false;
653 return true;
657 static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
659 glyphs->add (value);
661 static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
663 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
664 class_def.add_class (glyphs, value);
666 static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
668 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
669 (data+coverage).add_coverage (glyphs);
671 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
672 hb_set_t *glyphs,
673 unsigned int count,
674 const USHORT values[],
675 collect_glyphs_func_t collect_func,
676 const void *collect_data)
678 for (unsigned int i = 0; i < count; i++)
679 collect_func (glyphs, values[i], collect_data);
683 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
685 return glyph_id == value;
687 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
689 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
690 return class_def.get_class (glyph_id) == value;
692 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
694 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
695 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
698 static inline bool would_match_input (hb_would_apply_context_t *c,
699 unsigned int count, /* Including the first glyph (not matched) */
700 const USHORT input[], /* Array of input values--start with second glyph */
701 match_func_t match_func,
702 const void *match_data)
704 if (count != c->len)
705 return false;
707 for (unsigned int i = 1; i < count; i++)
708 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
709 return false;
711 return true;
713 static inline bool match_input (hb_apply_context_t *c,
714 unsigned int count, /* Including the first glyph (not matched) */
715 const USHORT input[], /* Array of input values--start with second glyph */
716 match_func_t match_func,
717 const void *match_data,
718 unsigned int *end_offset,
719 unsigned int match_positions[MAX_CONTEXT_LENGTH],
720 bool *p_is_mark_ligature = NULL,
721 unsigned int *p_total_component_count = NULL)
723 TRACE_APPLY (NULL);
725 if (unlikely (count > MAX_CONTEXT_LENGTH)) return TRACE_RETURN (false);
727 hb_buffer_t *buffer = c->buffer;
729 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
730 skippy_iter.reset (buffer->idx, count - 1);
731 skippy_iter.set_match_func (match_func, match_data, input);
734 * This is perhaps the trickiest part of OpenType... Remarks:
736 * - If all components of the ligature were marks, we call this a mark ligature.
738 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
739 * it as a ligature glyph.
741 * - Ligatures cannot be formed across glyphs attached to different components
742 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
743 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
744 * However, it would be wrong to ligate that SHADDA,FATHA sequence.o
745 * There is an exception to this: If a ligature tries ligating with marks that
746 * belong to it itself, go ahead, assuming that the font designer knows what
747 * they are doing (otherwise it can break Indic stuff when a matra wants to
748 * ligate with a conjunct...)
751 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());
753 unsigned int total_component_count = 0;
754 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
756 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
757 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
759 match_positions[0] = buffer->idx;
760 for (unsigned int i = 1; i < count; i++)
762 if (!skippy_iter.next ()) return TRACE_RETURN (false);
764 match_positions[i] = skippy_iter.idx;
766 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
767 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
769 if (first_lig_id && first_lig_comp) {
770 /* If first component was attached to a previous ligature component,
771 * all subsequent components should be attached to the same ligature
772 * component, otherwise we shouldn't ligate them. */
773 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
774 return TRACE_RETURN (false);
775 } else {
776 /* If first component was NOT attached to a previous ligature component,
777 * all subsequent components should also NOT be attached to any ligature
778 * component, unless they are attached to the first component itself! */
779 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
780 return TRACE_RETURN (false);
783 is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
784 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
787 *end_offset = skippy_iter.idx - buffer->idx + 1;
789 if (p_is_mark_ligature)
790 *p_is_mark_ligature = is_mark_ligature;
792 if (p_total_component_count)
793 *p_total_component_count = total_component_count;
795 return TRACE_RETURN (true);
797 static inline void ligate_input (hb_apply_context_t *c,
798 unsigned int count, /* Including the first glyph */
799 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
800 unsigned int match_length,
801 hb_codepoint_t lig_glyph,
802 bool is_mark_ligature,
803 unsigned int total_component_count)
805 TRACE_APPLY (NULL);
807 hb_buffer_t *buffer = c->buffer;
809 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
812 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
813 * the ligature to keep its old ligature id. This will allow it to attach to
814 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
815 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA wit a
816 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
817 * later, we don't want them to lose their ligature id/component, otherwise
818 * GPOS will fail to correctly position the mark ligature on top of the
819 * LAM,LAM,HEH ligature. See:
820 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
822 * - If a ligature is formed of components that some of which are also ligatures
823 * themselves, and those ligature components had marks attached to *their*
824 * components, we have to attach the marks to the new ligature component
825 * positions! Now *that*'s tricky! And these marks may be following the
826 * last component of the whole sequence, so we should loop forward looking
827 * for them and update them.
829 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
830 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
831 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
832 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
833 * the new ligature with a component value of 2.
835 * This in fact happened to a font... See:
836 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
839 unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
840 unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
841 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
842 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
843 unsigned int components_so_far = last_num_components;
845 if (!is_mark_ligature)
847 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
848 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
850 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
851 _hb_glyph_info_set_modified_combining_class (&buffer->cur(), 0);
854 c->replace_glyph_with_ligature (lig_glyph, klass);
856 for (unsigned int i = 1; i < count; i++)
858 while (buffer->idx < match_positions[i])
860 if (!is_mark_ligature) {
861 unsigned int new_lig_comp = components_so_far - last_num_components +
862 MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->cur()), 1u), last_num_components);
863 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
865 buffer->next_glyph ();
868 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
869 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
870 components_so_far += last_num_components;
872 /* Skip the base glyph */
873 buffer->idx++;
876 if (!is_mark_ligature && last_lig_id) {
877 /* Re-adjust components for any marks following. */
878 for (unsigned int i = buffer->idx; i < buffer->len; i++) {
879 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
880 unsigned int new_lig_comp = components_so_far - last_num_components +
881 MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->info[i]), 1u), last_num_components);
882 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
883 } else
884 break;
887 TRACE_RETURN (true);
890 static inline bool match_backtrack (hb_apply_context_t *c,
891 unsigned int count,
892 const USHORT backtrack[],
893 match_func_t match_func,
894 const void *match_data)
896 TRACE_APPLY (NULL);
898 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
899 skippy_iter.reset (c->buffer->backtrack_len (), count);
900 skippy_iter.set_match_func (match_func, match_data, backtrack);
902 for (unsigned int i = 0; i < count; i++)
903 if (!skippy_iter.prev ())
904 return TRACE_RETURN (false);
906 return TRACE_RETURN (true);
909 static inline bool match_lookahead (hb_apply_context_t *c,
910 unsigned int count,
911 const USHORT lookahead[],
912 match_func_t match_func,
913 const void *match_data,
914 unsigned int offset)
916 TRACE_APPLY (NULL);
918 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
919 skippy_iter.reset (c->buffer->idx + offset - 1, count);
920 skippy_iter.set_match_func (match_func, match_data, lookahead);
922 for (unsigned int i = 0; i < count; i++)
923 if (!skippy_iter.next ())
924 return TRACE_RETURN (false);
926 return TRACE_RETURN (true);
931 struct LookupRecord
933 inline bool sanitize (hb_sanitize_context_t *c) const
935 TRACE_SANITIZE (this);
936 return TRACE_RETURN (c->check_struct (this));
939 USHORT sequenceIndex; /* Index into current glyph
940 * sequence--first glyph = 0 */
941 USHORT lookupListIndex; /* Lookup to apply to that
942 * position--zero--based */
943 public:
944 DEFINE_SIZE_STATIC (4);
948 template <typename context_t>
949 static inline void recurse_lookups (context_t *c,
950 unsigned int lookupCount,
951 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
953 for (unsigned int i = 0; i < lookupCount; i++)
954 c->recurse (lookupRecord[i].lookupListIndex);
/* Applies each LookupRecord of a matched context to the buffer, keeping the
 * match_positions array consistent while recursed lookups insert or delete
 * glyphs.  Returns true (the context as a whole "applied") even if some
 * individual records were skipped. */
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 unsigned int match_length)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;
  unsigned int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_length;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    /* A record pointing past the (possibly shrunk) match is ignored. */
    if (idx >= count)
      continue;

    buffer->move_to (match_positions[idx]);

    /* Detect length change caused by the recursed lookup by comparing
     * total buffer length before and after. */
    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust. */

    /* end can't go back past the current match position.
     * Note: this is only true because we do NOT allow MultipleSubst
     * with zero sequence len. */
    end = MAX ((int) match_positions[idx] + 1, int (end) + delta);

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      /* Glyphs were inserted; give up if positions no longer fit. */
      if (unlikely (delta + count > MAX_CONTEXT_LENGTH))
	break;
    }
    else
    {
      /* NOTE: delta is negative. */
      /* Clamp so the shift below never moves `next` before idx+1. */
      delta = MAX (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
	     (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  buffer->move_to (end);

  return TRACE_RETURN (true);
}
1042 /* Contextual lookups */
/* Callback + data bundle used while computing glyph closure for
 * (non-chaining) contextual lookups. */
struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data;	/* Opaque argument for funcs.intersects
				 * (ClassDef, owning table, or NULL). */
};
/* Callback + data bundle used while collecting glyphs touched by
 * (non-chaining) contextual lookups. */
struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;	/* Opaque argument for funcs.collect. */
};
/* Callback + data bundle used while matching/applying (non-chaining)
 * contextual lookups. */
struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;	/* Opaque argument for funcs.match. */
};
1062 static inline void context_closure_lookup (hb_closure_context_t *c,
1063 unsigned int inputCount, /* Including the first glyph (not matched) */
1064 const USHORT input[], /* Array of input values--start with second glyph */
1065 unsigned int lookupCount,
1066 const LookupRecord lookupRecord[],
1067 ContextClosureLookupContext &lookup_context)
1069 if (intersects_array (c,
1070 inputCount ? inputCount - 1 : 0, input,
1071 lookup_context.funcs.intersects, lookup_context.intersects_data))
1072 recurse_lookups (c,
1073 lookupCount, lookupRecord);
1076 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1077 unsigned int inputCount, /* Including the first glyph (not matched) */
1078 const USHORT input[], /* Array of input values--start with second glyph */
1079 unsigned int lookupCount,
1080 const LookupRecord lookupRecord[],
1081 ContextCollectGlyphsLookupContext &lookup_context)
1083 collect_array (c, c->input,
1084 inputCount ? inputCount - 1 : 0, input,
1085 lookup_context.funcs.collect, lookup_context.collect_data);
1086 recurse_lookups (c,
1087 lookupCount, lookupRecord);
1090 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1091 unsigned int inputCount, /* Including the first glyph (not matched) */
1092 const USHORT input[], /* Array of input values--start with second glyph */
1093 unsigned int lookupCount HB_UNUSED,
1094 const LookupRecord lookupRecord[] HB_UNUSED,
1095 ContextApplyLookupContext &lookup_context)
1097 return would_match_input (c,
1098 inputCount, input,
1099 lookup_context.funcs.match, lookup_context.match_data);
1101 static inline bool context_apply_lookup (hb_apply_context_t *c,
1102 unsigned int inputCount, /* Including the first glyph (not matched) */
1103 const USHORT input[], /* Array of input values--start with second glyph */
1104 unsigned int lookupCount,
1105 const LookupRecord lookupRecord[],
1106 ContextApplyLookupContext &lookup_context)
1108 unsigned int match_length = 0;
1109 unsigned int match_positions[MAX_CONTEXT_LENGTH];
1110 return match_input (c,
1111 inputCount, input,
1112 lookup_context.funcs.match, lookup_context.match_data,
1113 &match_length, match_positions)
1114 && apply_lookup (c,
1115 inputCount, match_positions,
1116 lookupCount, lookupRecord,
1117 match_length);
/* One contextual rule: a glyph/class input sequence (first element implicit,
 * matched by the enclosing format's coverage/class) followed by the lookup
 * records to apply on a match.  The LookupRecord array immediately follows
 * the inputZ array in the binary layout, hence the StructAtOffset math. */
struct Rule
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    /* Records live right after the inputCount-1 input values. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
			    inputCount, inputZ,
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
				   inputCount, inputZ,
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return TRACE_RETURN (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return TRACE_RETURN (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  public:
  /* Validates both variable-length tails in one range check.
   * NOTE(review): the range uses inputCount (not inputCount-1) USHORTs,
   * over-checking by one element -- safe direction, but confirm intent. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (inputZ,
			   inputZ[0].static_size * inputCount
			   + lookupRecordX[0].static_size * lookupCount);
  }

  protected:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	inputZ[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
};
1180 struct RuleSet
1182 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1184 TRACE_CLOSURE (this);
1185 unsigned int num_rules = rule.len;
1186 for (unsigned int i = 0; i < num_rules; i++)
1187 (this+rule[i]).closure (c, lookup_context);
1190 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
1192 TRACE_COLLECT_GLYPHS (this);
1193 unsigned int num_rules = rule.len;
1194 for (unsigned int i = 0; i < num_rules; i++)
1195 (this+rule[i]).collect_glyphs (c, lookup_context);
1198 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1200 TRACE_WOULD_APPLY (this);
1201 unsigned int num_rules = rule.len;
1202 for (unsigned int i = 0; i < num_rules; i++)
1204 if ((this+rule[i]).would_apply (c, lookup_context))
1205 return TRACE_RETURN (true);
1207 return TRACE_RETURN (false);
1210 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1212 TRACE_APPLY (this);
1213 unsigned int num_rules = rule.len;
1214 for (unsigned int i = 0; i < num_rules; i++)
1216 if ((this+rule[i]).apply (c, lookup_context))
1217 return TRACE_RETURN (true);
1219 return TRACE_RETURN (false);
1222 inline bool sanitize (hb_sanitize_context_t *c) const
1224 TRACE_SANITIZE (this);
1225 return TRACE_RETURN (rule.sanitize (c, this));
1228 protected:
1229 OffsetArrayOf<Rule>
1230 rule; /* Array of Rule tables
1231 * ordered by preference */
1232 public:
1233 DEFINE_SIZE_ARRAY (2, rule);
/* Contextual lookup, format 1: rules keyed by individual first glyph via a
 * Coverage table; one RuleSet per covered glyph. */
struct ContextFormat1
{
  /* Extend c->glyphs with everything reachable through rules whose first
   * glyph is already in the set. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    const Coverage &cov = (this+coverage);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      NULL			/* Glyph ids compared directly; no extra data. */
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  /* Record every glyph this subtable can touch. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED))
      return TRACE_RETURN (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
/* Contextual lookup, format 2: like format 1 but rules are keyed by glyph
 * class (via a ClassDef) rather than by individual glyph. */
struct ContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    /* Nothing reachable if no first glyph is in the set. */
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def		/* intersects_class needs the ClassDef. */
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ClassDef &class_def = this+classDef;
    /* Re-key by class to pick the RuleSet. */
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};
/* Contextual lookup, format 3: a single rule where every input position has
 * its own Coverage table; coverageZ[0] doubles as the lookup's coverage.
 * The LookupRecord array follows the coverage offsets in the binary layout. */
struct ContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this			/* Offsets are relative to this table. */
    };
    /* coverageZ + 1: positions after the first are the "input" sequence. */
    context_closure_lookup (c,
			    glyphCount, (const USHORT *) (coverageZ + 1),
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverageZ[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
				   glyphCount, (const USHORT *) (coverageZ + 1),
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverageZ[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    unsigned int count = glyphCount;
    if (!count) return TRACE_RETURN (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return TRACE_RETURN (false);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
    return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverageZ[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
};
/* Format-dispatch wrapper for contextual lookups (GSUB type 5 / GPOS type 7). */
struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    /* NOTE(review): there is no `return` before TRACE_RETURN here, so on a
     * failed may_dispatch control falls through into the switch -- looks
     * unintentional; confirm against upstream (later versions use
     * `return_trace (...)`, which does return). */
    if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    case 3: return TRACE_RETURN (c->dispatch (u.format3));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};
1539 /* Chaining Contextual lookups */
/* Callback + data bundle for chain-context closure; one data slot per
 * sub-sequence: [0]=backtrack, [1]=input, [2]=lookahead. */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};
/* Callback + data bundle for chain-context glyph collection; one data slot
 * per sub-sequence: [0]=backtrack, [1]=input, [2]=lookahead. */
struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};
/* Callback + data bundle for chain-context matching; one data slot per
 * sub-sequence: [0]=backtrack, [1]=input, [2]=lookahead. */
struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};
1559 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
1560 unsigned int backtrackCount,
1561 const USHORT backtrack[],
1562 unsigned int inputCount, /* Including the first glyph (not matched) */
1563 const USHORT input[], /* Array of input values--start with second glyph */
1564 unsigned int lookaheadCount,
1565 const USHORT lookahead[],
1566 unsigned int lookupCount,
1567 const LookupRecord lookupRecord[],
1568 ChainContextClosureLookupContext &lookup_context)
1570 if (intersects_array (c,
1571 backtrackCount, backtrack,
1572 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
1573 && intersects_array (c,
1574 inputCount ? inputCount - 1 : 0, input,
1575 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
1576 && intersects_array (c,
1577 lookaheadCount, lookahead,
1578 lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
1579 recurse_lookups (c,
1580 lookupCount, lookupRecord);
1583 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1584 unsigned int backtrackCount,
1585 const USHORT backtrack[],
1586 unsigned int inputCount, /* Including the first glyph (not matched) */
1587 const USHORT input[], /* Array of input values--start with second glyph */
1588 unsigned int lookaheadCount,
1589 const USHORT lookahead[],
1590 unsigned int lookupCount,
1591 const LookupRecord lookupRecord[],
1592 ChainContextCollectGlyphsLookupContext &lookup_context)
1594 collect_array (c, c->before,
1595 backtrackCount, backtrack,
1596 lookup_context.funcs.collect, lookup_context.collect_data[0]);
1597 collect_array (c, c->input,
1598 inputCount ? inputCount - 1 : 0, input,
1599 lookup_context.funcs.collect, lookup_context.collect_data[1]);
1600 collect_array (c, c->after,
1601 lookaheadCount, lookahead,
1602 lookup_context.funcs.collect, lookup_context.collect_data[2]);
1603 recurse_lookups (c,
1604 lookupCount, lookupRecord);
1607 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
1608 unsigned int backtrackCount,
1609 const USHORT backtrack[] HB_UNUSED,
1610 unsigned int inputCount, /* Including the first glyph (not matched) */
1611 const USHORT input[], /* Array of input values--start with second glyph */
1612 unsigned int lookaheadCount,
1613 const USHORT lookahead[] HB_UNUSED,
1614 unsigned int lookupCount HB_UNUSED,
1615 const LookupRecord lookupRecord[] HB_UNUSED,
1616 ChainContextApplyLookupContext &lookup_context)
1618 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
1619 && would_match_input (c,
1620 inputCount, input,
1621 lookup_context.funcs.match, lookup_context.match_data[1]);
1624 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
1625 unsigned int backtrackCount,
1626 const USHORT backtrack[],
1627 unsigned int inputCount, /* Including the first glyph (not matched) */
1628 const USHORT input[], /* Array of input values--start with second glyph */
1629 unsigned int lookaheadCount,
1630 const USHORT lookahead[],
1631 unsigned int lookupCount,
1632 const LookupRecord lookupRecord[],
1633 ChainContextApplyLookupContext &lookup_context)
1635 unsigned int match_length = 0;
1636 unsigned int match_positions[MAX_CONTEXT_LENGTH];
1637 return match_input (c,
1638 inputCount, input,
1639 lookup_context.funcs.match, lookup_context.match_data[1],
1640 &match_length, match_positions)
1641 && match_backtrack (c,
1642 backtrackCount, backtrack,
1643 lookup_context.funcs.match, lookup_context.match_data[0])
1644 && match_lookahead (c,
1645 lookaheadCount, lookahead,
1646 lookup_context.funcs.match, lookup_context.match_data[2],
1647 match_length)
1648 && apply_lookup (c,
1649 inputCount, match_positions,
1650 lookupCount, lookupRecord,
1651 match_length);
/* One chain-context rule: four variable-length arrays laid out back to back
 * (backtrack, input, lookahead, lookup records); each is located by walking
 * past the previous one with StructAfter. */
struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.array,
				  input.len, input.array,
				  lookahead.len, lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, backtrack.array,
					 input.len, input.array,
					 lookahead.len, lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (chain_context_would_apply_lookup (c,
							   backtrack.len, backtrack.array,
							   input.len, input.array,
							   lookahead.len, lookahead.array, lookup.len,
							   lookup.array, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (chain_context_apply_lookup (c,
						     backtrack.len, backtrack.array,
						     input.len, input.array,
						     lookahead.len, lookahead.array, lookup.len,
						     lookup.array, lookup_context));
  }

  /* Each array must be validated before the next can be located, hence the
   * stepwise sanitize. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return TRACE_RETURN (false);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (lookup.sanitize (c));
  }

  protected:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};
1740 struct ChainRuleSet
1742 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1744 TRACE_CLOSURE (this);
1745 unsigned int num_rules = rule.len;
1746 for (unsigned int i = 0; i < num_rules; i++)
1747 (this+rule[i]).closure (c, lookup_context);
1750 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
1752 TRACE_COLLECT_GLYPHS (this);
1753 unsigned int num_rules = rule.len;
1754 for (unsigned int i = 0; i < num_rules; i++)
1755 (this+rule[i]).collect_glyphs (c, lookup_context);
1758 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1760 TRACE_WOULD_APPLY (this);
1761 unsigned int num_rules = rule.len;
1762 for (unsigned int i = 0; i < num_rules; i++)
1763 if ((this+rule[i]).would_apply (c, lookup_context))
1764 return TRACE_RETURN (true);
1766 return TRACE_RETURN (false);
1769 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1771 TRACE_APPLY (this);
1772 unsigned int num_rules = rule.len;
1773 for (unsigned int i = 0; i < num_rules; i++)
1774 if ((this+rule[i]).apply (c, lookup_context))
1775 return TRACE_RETURN (true);
1777 return TRACE_RETURN (false);
1780 inline bool sanitize (hb_sanitize_context_t *c) const
1782 TRACE_SANITIZE (this);
1783 return TRACE_RETURN (rule.sanitize (c, this));
1786 protected:
1787 OffsetArrayOf<ChainRule>
1788 rule; /* Array of ChainRule tables
1789 * ordered by preference */
1790 public:
1791 DEFINE_SIZE_ARRAY (2, rule);
/* Chain-context lookup, format 1: rules keyed by individual first glyph via
 * a Coverage table; one ChainRuleSet per covered glyph. */
struct ChainContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const Coverage &cov = (this+coverage);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {NULL, NULL, NULL}	/* Glyph ids compared directly; no extra data. */
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return TRACE_RETURN (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
1878 struct ChainContextFormat2
1880 inline void closure (hb_closure_context_t *c) const
1882 TRACE_CLOSURE (this);
1883 if (!(this+coverage).intersects (c->glyphs))
1884 return;
1886 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1887 const ClassDef &input_class_def = this+inputClassDef;
1888 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1890 struct ChainContextClosureLookupContext lookup_context = {
1891 {intersects_class},
1892 {&backtrack_class_def,
1893 &input_class_def,
1894 &lookahead_class_def}
1897 unsigned int count = ruleSet.len;
1898 for (unsigned int i = 0; i < count; i++)
1899 if (input_class_def.intersects_class (c->glyphs, i)) {
1900 const ChainRuleSet &rule_set = this+ruleSet[i];
1901 rule_set.closure (c, lookup_context);
1905 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1907 TRACE_COLLECT_GLYPHS (this);
1908 (this+coverage).add_coverage (c->input);
1910 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1911 const ClassDef &input_class_def = this+inputClassDef;
1912 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1914 struct ChainContextCollectGlyphsLookupContext lookup_context = {
1915 {collect_class},
1916 {&backtrack_class_def,
1917 &input_class_def,
1918 &lookahead_class_def}
1921 unsigned int count = ruleSet.len;
1922 for (unsigned int i = 0; i < count; i++)
1923 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1926 inline bool would_apply (hb_would_apply_context_t *c) const
1928 TRACE_WOULD_APPLY (this);
1930 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1931 const ClassDef &input_class_def = this+inputClassDef;
1932 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1934 unsigned int index = input_class_def.get_class (c->glyphs[0]);
1935 const ChainRuleSet &rule_set = this+ruleSet[index];
1936 struct ChainContextApplyLookupContext lookup_context = {
1937 {match_class},
1938 {&backtrack_class_def,
1939 &input_class_def,
1940 &lookahead_class_def}
1942 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1945 inline const Coverage &get_coverage (void) const
1947 return this+coverage;
1950 inline bool apply (hb_apply_context_t *c) const
1952 TRACE_APPLY (this);
1953 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1954 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1956 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1957 const ClassDef &input_class_def = this+inputClassDef;
1958 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1960 index = input_class_def.get_class (c->buffer->cur().codepoint);
1961 const ChainRuleSet &rule_set = this+ruleSet[index];
1962 struct ChainContextApplyLookupContext lookup_context = {
1963 {match_class},
1964 {&backtrack_class_def,
1965 &input_class_def,
1966 &lookahead_class_def}
1968 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1971 inline bool sanitize (hb_sanitize_context_t *c) const
1973 TRACE_SANITIZE (this);
1974 return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
1975 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
1976 ruleSet.sanitize (c, this));
  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  /* 12 = format + coverage + 3 class-def offsets + ruleSet count. */
  DEFINE_SIZE_ARRAY (12, ruleSet);
};
/* Chaining context lookup, format 3: coverage-based.  Four variable-length
 * arrays (backtrack, input, lookahead coverages, then lookup records) are
 * laid out back to back; only 'backtrack' has a named member, the rest are
 * reached by stepping past the previous array with StructAfter<>. */
struct ChainContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    /* The first input coverage acts as this subtable's gate. */
    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
    /* 'input.array + 1' skips the first coverage, already tested above. */
    chain_context_closure_lookup (c,
				  backtrack.len, (const USHORT *) backtrack.array,
				  input.len, (const USHORT *) input.array + 1,
				  lookahead.len, (const USHORT *) lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    (this+input[0]).add_coverage (c->input);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, (const USHORT *) backtrack.array,
					 input.len, (const USHORT *) input.array + 1,
					 lookahead.len, (const USHORT *) lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return TRACE_RETURN (chain_context_would_apply_lookup (c,
							   backtrack.len, (const USHORT *) backtrack.array,
							   input.len, (const USHORT *) input.array + 1,
							   lookahead.len, (const USHORT *) lookahead.array,
							   lookup.len, lookup.array, lookup_context));
  }

  /* Format 3 has no top-level coverage member; the first input coverage
   * plays that role. */
  inline const Coverage &get_coverage (void) const
  {
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    return this+input[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return TRACE_RETURN (chain_context_apply_lookup (c,
						     backtrack.len, (const USHORT *) backtrack.array,
						     input.len, (const USHORT *) input.array + 1,
						     lookahead.len, (const USHORT *) lookahead.array,
						     lookup.len, lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Each array must be sanitized before StructAfter<> may step past it;
     * do not reorder these checks. */
    if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return TRACE_RETURN (false);
    if (!input.len) return TRACE_RETURN (false); /* To be consistent with Context. */
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return TRACE_RETURN (lookup.sanitize (c));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		inputX	;		/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  /* Minimum size only: the trailing arrays are variable-length. */
  DEFINE_SIZE_MIN (10);
};
2127 struct ChainContext
2129 template <typename context_t>
2130 inline typename context_t::return_t dispatch (context_t *c) const
2132 TRACE_DISPATCH (this, u.format);
2133 if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
2134 switch (u.format) {
2135 case 1: return TRACE_RETURN (c->dispatch (u.format1));
2136 case 2: return TRACE_RETURN (c->dispatch (u.format2));
2137 case 3: return TRACE_RETURN (c->dispatch (u.format3));
2138 default:return TRACE_RETURN (c->default_return_value ());
2142 protected:
2143 union {
2144 USHORT format; /* Format identifier */
2145 ChainContextFormat1 format1;
2146 ChainContextFormat2 format2;
2147 ChainContextFormat3 format3;
2148 } u;
2152 template <typename T>
2153 struct ExtensionFormat1
2155 inline unsigned int get_type (void) const { return extensionLookupType; }
2157 template <typename X>
2158 inline const X& get_subtable (void) const
2160 unsigned int offset = extensionOffset;
2161 if (unlikely (!offset)) return Null(typename T::LookupSubTable);
2162 return StructAtOffset<typename T::LookupSubTable> (this, offset);
2165 template <typename context_t>
2166 inline typename context_t::return_t dispatch (context_t *c) const
2168 TRACE_DISPATCH (this, format);
2169 if (unlikely (!c->may_dispatch (this, this))) TRACE_RETURN (c->default_return_value ());
2170 return TRACE_RETURN (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ()));
2173 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
2174 inline bool sanitize (hb_sanitize_context_t *c) const
2176 TRACE_SANITIZE (this);
2177 return TRACE_RETURN (c->check_struct (this) && extensionOffset != 0);
2180 protected:
2181 USHORT format; /* Format identifier. Set to 1. */
2182 USHORT extensionLookupType; /* Lookup type of subtable referenced
2183 * by ExtensionOffset (i.e. the
2184 * extension subtable). */
2185 ULONG extensionOffset; /* Offset to the extension subtable,
2186 * of lookup type subtable. */
2187 public:
2188 DEFINE_SIZE_STATIC (8);
2191 template <typename T>
2192 struct Extension
2194 inline unsigned int get_type (void) const
2196 switch (u.format) {
2197 case 1: return u.format1.get_type ();
2198 default:return 0;
2201 template <typename X>
2202 inline const X& get_subtable (void) const
2204 switch (u.format) {
2205 case 1: return u.format1.template get_subtable<typename T::LookupSubTable> ();
2206 default:return Null(typename T::LookupSubTable);
2210 template <typename context_t>
2211 inline typename context_t::return_t dispatch (context_t *c) const
2213 TRACE_DISPATCH (this, u.format);
2214 if (unlikely (!c->may_dispatch (this, &u.format))) TRACE_RETURN (c->default_return_value ());
2215 switch (u.format) {
2216 case 1: return TRACE_RETURN (u.format1.dispatch (c));
2217 default:return TRACE_RETURN (c->default_return_value ());
2221 protected:
2222 union {
2223 USHORT format; /* Format identifier */
2224 ExtensionFormat1<T> format1;
2225 } u;
/*
 * GSUB/GPOS Common
 */
/* Common top-level structure shared by the GSUB and GPOS tables:
 * a version header plus offsets to the script, feature and lookup lists. */
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  /* Script-list accessors. */
  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  /* Feature-list accessors; NOT_FOUND_INDEX maps to HB_TAG_NONE. */
  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  /* Lookup-list accessors. */
  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Only major version 1 is understood; reject anything else. */
    return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
			 scriptList.sanitize (c, this) &&
			 featureList.sanitize (c, this) &&
			 lookupList.sanitize (c, this));
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  OffsetTo<ScriptList>
		scriptList;  	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList; 	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList; 	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};
2292 } /* namespace OT */
2295 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */