Mailbox support for texture layers.
[chromium-blink-merge.git] / third_party / harfbuzz-ng / src / hb-ot-layout-gsubgpos-private.hh
blobdd7bdd3aeabb4f5c33a62d427c3de30b2c5ac928
1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-table.hh"
34 #include "hb-set-private.hh"
37 namespace OT {
/* Debug/trace plumbing for the glyph-closure pass.  Enabled when
 * HB_DEBUG_CLOSURE (defaults to the global HB_DEBUG level) is non-zero;
 * TRACE_CLOSURE() expects a context pointer named `c` in scope. */
40 #ifndef HB_DEBUG_CLOSURE
41 #define HB_DEBUG_CLOSURE (HB_DEBUG+0)
42 #endif
44 #define TRACE_CLOSURE() \
45 hb_auto_trace_t<HB_DEBUG_CLOSURE> trace (&c->debug_depth, "CLOSURE", this, HB_FUNC, "");
/* Context carried through glyph "closure": computing the full set of glyphs
 * reachable from a starting glyph set via GSUB substitutions. */
48 struct hb_closure_context_t
50 hb_face_t *face;
51 hb_set_t *glyphs; /* In/out: the glyph set being closed over. */
52 unsigned int nesting_level_left; /* Remaining recursion budget for nested lookups. */
53 unsigned int debug_depth; /* For trace indentation only. */
56 hb_closure_context_t (hb_face_t *face_,
57 hb_set_t *glyphs_,
58 unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
59 face (face_),
60 glyphs (glyphs_),
61 nesting_level_left (nesting_level_left_),
62 debug_depth (0) {}
67 /* TODO Add TRACE_RETURN annotation to gsub. */
/* Debug/trace plumbing for the would_apply() query path. */
68 #ifndef HB_DEBUG_WOULD_APPLY
69 #define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
70 #endif
72 #define TRACE_WOULD_APPLY() \
73 hb_auto_trace_t<HB_DEBUG_WOULD_APPLY> trace (&c->debug_depth, "WOULD_APPLY", this, HB_FUNC, "%d glyphs", c->len);
/* Context for the "would this lookup apply to this exact glyph sequence?"
 * query, used without an actual buffer (e.g. by shapers probing the font). */
76 struct hb_would_apply_context_t
78 hb_face_t *face;
79 const hb_codepoint_t *glyphs; /* The candidate glyph sequence (not a buffer). */
80 unsigned int len; /* Number of glyphs in `glyphs`. */
81 bool zero_context; /* If true, context (backtrack/lookahead) must be empty to match. */
82 unsigned int debug_depth;
84 hb_would_apply_context_t (hb_face_t *face_,
85 const hb_codepoint_t *glyphs_,
86 unsigned int len_,
87 bool zero_context_) :
88 face (face_),
89 glyphs (glyphs_),
90 len (len_),
91 zero_context (zero_context_),
/* NOTE(review): stray ';' after the constructor body below — harmless, but
 * non-idiomatic; the sibling contexts' constructors have no trailing ';'. */
92 debug_depth (0) {};
/* Debug/trace plumbing for the apply (actual substitution/positioning) path. */
96 #ifndef HB_DEBUG_APPLY
97 #define HB_DEBUG_APPLY (HB_DEBUG+0)
98 #endif
100 #define TRACE_APPLY() \
101 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", this, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
/* Context carried through lookup application: wraps the font/face/buffer plus
 * the active lookup's mask and flags, and provides the mark-skipping iterators
 * and glyph-property matching used by all GSUB/GPOS subtable apply() methods. */
104 struct hb_apply_context_t
106 hb_font_t *font;
107 hb_face_t *face;
108 hb_buffer_t *buffer;
109 hb_direction_t direction;
110 hb_mask_t lookup_mask; /* Mask of the lookup currently being applied. */
111 unsigned int nesting_level_left; /* Remaining recursion budget for nested lookups. */
112 unsigned int lookup_props; /* LookupFlag word (incl. mark-filtering-set index in high short). */
113 unsigned int property; /* property of first glyph */
114 unsigned int debug_depth;
115 const GDEF &gdef;
116 bool has_glyph_classes;
119 hb_apply_context_t (hb_font_t *font_,
120 hb_buffer_t *buffer_,
121 hb_mask_t lookup_mask_) :
122 font (font_), face (font->face), buffer (buffer_),
123 direction (buffer_->props.direction),
124 lookup_mask (lookup_mask_),
125 nesting_level_left (MAX_NESTING_LEVEL),
126 lookup_props (0), property (0), debug_depth (0),
127 gdef (*hb_ot_layout_from_face (face)->gdef),
128 has_glyph_classes (gdef.has_glyph_classes ()) {}
130 void set_lookup_props (unsigned int lookup_props_) {
131 lookup_props = lookup_props_;
134 void set_lookup (const Lookup &l) {
135 lookup_props = l.get_props ();
/* Iterator that walks forward over buffer->info[] starting after start_index_,
 * skipping ignorable marks (per lookup_props), expecting num_items_ more
 * matchable glyphs.  With context_match=true, mask/syllable checks are
 * relaxed for backtrack/lookahead matching. */
138 struct mark_skipping_forward_iterator_t
140 inline mark_skipping_forward_iterator_t (hb_apply_context_t *c_,
141 unsigned int start_index_,
142 unsigned int num_items_,
143 bool context_match = false)
145 c = c_;
146 idx = start_index_;
147 num_items = num_items_;
/* context_match: accept any mask (-1) and ignore syllable boundaries. */
148 mask = context_match ? -1 : c->lookup_mask;
149 syllable = context_match ? 0 : c->buffer->cur().syllable ();
150 end = c->buffer->len;
/* True when fewer than num_items glyphs remain ahead of idx. */
152 inline bool has_no_chance (void) const
154 return unlikely (num_items && idx + num_items >= end);
/* Undo one successful next(): the glyph was examined but not consumed. */
156 inline void reject (void)
158 num_items++;
/* Advance to the next non-skipped glyph; returns false when out of glyphs or
 * the glyph fails the mask/syllable check.  property_out receives the glyph's
 * GDEF properties (from should_skip_mark). */
160 inline bool next (unsigned int *property_out,
161 unsigned int lookup_props)
163 assert (num_items > 0);
166 if (has_no_chance ())
167 return false;
168 idx++;
169 } while (c->should_skip_mark (&c->buffer->info[idx], lookup_props, property_out));
170 num_items--;
171 return (c->buffer->info[idx].mask & mask) && (!syllable || syllable == c->buffer->info[idx].syllable ());
173 inline bool next (unsigned int *property_out = NULL)
175 return next (property_out, c->lookup_props);
178 unsigned int idx;
179 protected:
180 hb_apply_context_t *c;
181 unsigned int num_items;
182 hb_mask_t mask;
183 uint8_t syllable;
184 unsigned int end;
/* Mirror image of the forward iterator: walks backward over buffer->out_info[]
 * (already-output glyphs), used for backtrack matching and mark attachment. */
187 struct mark_skipping_backward_iterator_t
189 inline mark_skipping_backward_iterator_t (hb_apply_context_t *c_,
190 unsigned int start_index_,
191 unsigned int num_items_,
192 hb_mask_t mask_ = 0,
193 bool match_syllable_ = true)
195 c = c_;
196 idx = start_index_;
197 num_items = num_items_;
198 mask = mask_ ? mask_ : c->lookup_mask;
199 syllable = match_syllable_ ? c->buffer->cur().syllable () : 0;
201 inline bool has_no_chance (void) const
203 return unlikely (idx < num_items);
205 inline void reject (void)
207 num_items++;
209 inline bool prev (unsigned int *property_out,
210 unsigned int lookup_props)
212 assert (num_items > 0);
215 if (has_no_chance ())
216 return false;
217 idx--;
218 } while (c->should_skip_mark (&c->buffer->out_info[idx], lookup_props, property_out));
219 num_items--;
220 return (c->buffer->out_info[idx].mask & mask) && (!syllable || syllable == c->buffer->out_info[idx].syllable ());
222 inline bool prev (unsigned int *property_out = NULL)
224 return prev (property_out, c->lookup_props);
227 unsigned int idx;
228 protected:
229 hb_apply_context_t *c;
230 unsigned int num_items;
231 hb_mask_t mask;
232 uint8_t syllable;
/* Does `glyph` (a mark) survive this lookup's mark filtering? */
235 inline bool
236 match_properties_mark (hb_codepoint_t glyph,
237 unsigned int glyph_props,
238 unsigned int lookup_props) const
240 /* If using mark filtering sets, the high short of
241 * lookup_props has the set index.
243 if (lookup_props & LookupFlag::UseMarkFilteringSet)
244 return gdef.mark_set_covers (lookup_props >> 16, glyph);
246 /* The second byte of lookup_props has the meaning
247 * "ignore marks of attachment type different than
248 * the attachment type specified."
250 if (lookup_props & LookupFlag::MarkAttachmentType)
251 return (lookup_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
253 return true;
/* Does a glyph with GDEF class `glyph_props` participate in this lookup? */
256 inline bool
257 match_properties (hb_codepoint_t glyph,
258 unsigned int glyph_props,
259 unsigned int lookup_props) const
261 /* Not covered, if, for example, glyph class is ligature and
262 * lookup_props includes LookupFlags::IgnoreLigatures
264 if (glyph_props & lookup_props & LookupFlag::IgnoreFlags)
265 return false;
267 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_CLASS_MARK))
268 return match_properties_mark (glyph, glyph_props, lookup_props);
270 return true;
/* Like match_properties but also reports the glyph's properties; used on the
 * first glyph of a match, where property_out must always be filled. */
273 inline bool
274 check_glyph_property (hb_glyph_info_t *info,
275 unsigned int lookup_props,
276 unsigned int *property_out) const
278 unsigned int property;
280 property = info->glyph_props();
281 *property_out = property;
283 return match_properties (info->codepoint, property, lookup_props);
/* Should the mark-skipping iterators skip this glyph?  Non-marks are never
 * skipped; marks are skipped iff this lookup does not accept them. */
286 inline bool
287 should_skip_mark (hb_glyph_info_t *info,
288 unsigned int lookup_props,
289 unsigned int *property_out) const
291 unsigned int property;
293 property = info->glyph_props();
294 if (property_out)
295 *property_out = property;
297 /* If it's a mark, skip it if we don't accept it. */
298 if (unlikely (property & HB_OT_LAYOUT_GLYPH_CLASS_MARK))
299 return !match_properties (info->codepoint, property, lookup_props);
301 /* If not a mark, don't skip. */
302 return false;
306 inline bool should_mark_skip_current_glyph (void) const
308 return should_skip_mark (&buffer->cur(), lookup_props, NULL);
/* Record the GDEF glyph class of a newly produced glyph (or the caller's
 * guess when the font has no glyph classes). */
311 inline void set_class (hb_codepoint_t glyph_index, unsigned int class_guess) const
313 if (likely (has_glyph_classes))
314 buffer->cur().glyph_props() = gdef.get_glyph_props (glyph_index);
315 else if (class_guess)
316 buffer->cur().glyph_props() = class_guess;
/* Emit glyph_index without consuming the current glyph. */
319 inline void output_glyph (hb_codepoint_t glyph_index,
320 unsigned int class_guess = 0) const
322 set_class (glyph_index, class_guess);
323 buffer->output_glyph (glyph_index);
/* Replace the current glyph with glyph_index, advancing the buffer. */
325 inline void replace_glyph (hb_codepoint_t glyph_index,
326 unsigned int class_guess = 0) const
328 set_class (glyph_index, class_guess);
329 buffer->replace_glyph (glyph_index);
/* Replace the current glyph in place, without advancing the buffer. */
331 inline void replace_glyph_inplace (hb_codepoint_t glyph_index,
332 unsigned int class_guess = 0) const
334 set_class (glyph_index, class_guess);
335 buffer->cur().codepoint = glyph_index;
/* Callback types parameterizing the generic context matchers below over the
 * three context formats (glyph / class / coverage). */
341 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
342 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
343 typedef void (*closure_lookup_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
344 typedef bool (*apply_lookup_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
/* Function-pair bundle used by the closure path. */
346 struct ContextClosureFuncs
348 intersects_func_t intersects;
349 closure_lookup_func_t closure;
/* Function-pair bundle used by the would_apply/apply paths. */
351 struct ContextApplyFuncs
353 match_func_t match;
354 apply_lookup_func_t apply;
/* Format-1 closure predicate: `value` is a glyph id; test set membership. */
357 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
359 return glyphs->has (value);
/* Format-2 closure predicate: `value` is a class; `data` is the ClassDef. */
361 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
363 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
364 return class_def.intersects_class (glyphs, value);
/* Format-3 closure predicate: `value` is an offset to a Coverage table,
 * relative to `data` (the subtable base). */
366 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
368 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
369 return (data+coverage).intersects (glyphs);
/* True iff every value in `values` intersects the closure glyph set,
 * under the format-specific predicate. */
372 static inline bool intersects_array (hb_closure_context_t *c,
373 unsigned int count,
374 const USHORT values[],
375 intersects_func_t intersects_func,
376 const void *intersects_data)
378 for (unsigned int i = 0; i < count; i++)
379 if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
380 return false;
381 return true;
/* Format-1 match predicate: direct glyph-id comparison. */
385 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
387 return glyph_id == value;
/* Format-2 match predicate: glyph's ClassDef class must equal `value`. */
389 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
391 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
392 return class_def.get_class (glyph_id) == value;
/* Format-3 match predicate: glyph must be covered by the Coverage table at
 * offset `value` from `data` (the subtable base). */
394 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
396 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
397 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
/* would_apply variant of input matching: requires an exact length match and
 * compares glyphs 1..count-1 directly (no buffer, no mark skipping). */
401 static inline bool would_match_input (hb_would_apply_context_t *c,
402 unsigned int count, /* Including the first glyph (not matched) */
403 const USHORT input[], /* Array of input values--start with second glyph */
404 match_func_t match_func,
405 const void *match_data)
407 if (count != c->len)
408 return false;
410 for (unsigned int i = 1; i < count; i++)
411 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
412 return false;
414 return true;
/* Match the input sequence of a (chain)context/ligature rule against the
 * buffer starting at the current glyph, skipping ignorable marks.  On success
 * optionally reports the matched span length (*end_offset, in buffer
 * positions), whether all components were marks (*p_is_mark_ligature), and
 * the summed ligature-component count (*p_total_component_count) — the latter
 * two feed ligate_input(). */
416 static inline bool match_input (hb_apply_context_t *c,
417 unsigned int count, /* Including the first glyph (not matched) */
418 const USHORT input[], /* Array of input values--start with second glyph */
419 match_func_t match_func,
420 const void *match_data,
421 unsigned int *end_offset = NULL,
422 bool *p_is_mark_ligature = NULL,
423 unsigned int *p_total_component_count = NULL)
425 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", NULL, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
427 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx, count - 1);
428 if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
431 * This is perhaps the trickiest part of OpenType... Remarks:
433 * - If all components of the ligature were marks, we call this a mark ligature.
435 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
436 * it as a ligature glyph.
438 * - Ligatures cannot be formed across glyphs attached to different components
439 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
440 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
441 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
442 * There is an exception to this: If a ligature tries ligating with marks that
443 * belong to it itself, go ahead, assuming that the font designer knows what
444 * they are doing (otherwise it can break Indic stuff when a matra wants to
445 * ligate with a conjunct...)
448 bool is_mark_ligature = !!(c->property & HB_OT_LAYOUT_GLYPH_CLASS_MARK);
450 unsigned int total_component_count = 0;
451 total_component_count += get_lig_num_comps (c->buffer->cur());
453 unsigned int first_lig_id = get_lig_id (c->buffer->cur());
454 unsigned int first_lig_comp = get_lig_comp (c->buffer->cur());
456 for (unsigned int i = 1; i < count; i++)
458 unsigned int property;
460 if (!skippy_iter.next (&property)) return TRACE_RETURN (false);
462 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, input[i - 1], match_data))) return TRACE_RETURN (false);
464 unsigned int this_lig_id = get_lig_id (c->buffer->info[skippy_iter.idx]);
465 unsigned int this_lig_comp = get_lig_comp (c->buffer->info[skippy_iter.idx]);
467 if (first_lig_id && first_lig_comp) {
468 /* If first component was attached to a previous ligature component,
469 * all subsequent components should be attached to the same ligature
470 * component, otherwise we shouldn't ligate them. */
471 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
472 return TRACE_RETURN (false);
473 } else {
474 /* If first component was NOT attached to a previous ligature component,
475 * all subsequent components should also NOT be attached to any ligature
476 * component, unless they are attached to the first component itself! */
477 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
478 return TRACE_RETURN (false);
481 is_mark_ligature = is_mark_ligature && (property & HB_OT_LAYOUT_GLYPH_CLASS_MARK);
482 total_component_count += get_lig_num_comps (c->buffer->info[skippy_iter.idx]);
485 if (end_offset)
486 *end_offset = skippy_iter.idx - c->buffer->idx + 1;
488 if (p_is_mark_ligature)
489 *p_is_mark_ligature = is_mark_ligature;
491 if (p_total_component_count)
492 *p_total_component_count = total_component_count;
494 return TRACE_RETURN (true);
/* Form the ligature after a successful match_input(): substitutes lig_glyph
 * for the matched components and re-parents intervening/following marks onto
 * the new ligature's components.  is_mark_ligature/total_component_count come
 * from match_input(). */
496 static inline void ligate_input (hb_apply_context_t *c,
497 unsigned int count, /* Including the first glyph (not matched) */
498 const USHORT input[], /* Array of input values--start with second glyph */
499 hb_codepoint_t lig_glyph,
500 match_func_t match_func,
501 const void *match_data,
502 bool is_mark_ligature,
503 unsigned int total_component_count)
506 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
507 * the ligature to keep its old ligature id. This will allow it to attach to
508 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
509 * and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
510 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
511 * later, we don't want them to lose their ligature id/component, otherwise
512 * GPOS will fail to correctly position the mark ligature on top of the
513 * LAM,LAM,HEH ligature. See:
514 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
516 * - If a ligature is formed of components that some of which are also ligatures
517 * themselves, and those ligature components had marks attached to *their*
518 * components, we have to attach the marks to the new ligature component
519 * positions! Now *that*'s tricky! And these marks may be following the
520 * last component of the whole sequence, so we should loop forward looking
521 * for them and update them.
523 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
524 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
525 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
526 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
527 * the new ligature with a component value of 2.
529 * This in fact happened to a font... See:
530 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
533 unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_CLASS_LIGATURE;
534 unsigned int lig_id = is_mark_ligature ? 0 : allocate_lig_id (c->buffer);
535 unsigned int last_lig_id = get_lig_id (c->buffer->cur());
536 unsigned int last_num_components = get_lig_num_comps (c->buffer->cur());
537 unsigned int components_so_far = last_num_components;
539 if (!is_mark_ligature)
540 set_lig_props_for_ligature (c->buffer->cur(), lig_id, total_component_count);
541 c->replace_glyph (lig_glyph, klass);
543 for (unsigned int i = 1; i < count; i++)
/* Copy skipped marks through, re-pointing them at the new ligature/component. */
545 while (c->should_mark_skip_current_glyph ())
547 if (!is_mark_ligature) {
548 unsigned int new_lig_comp = components_so_far - last_num_components +
549 MIN (MAX (get_lig_comp (c->buffer->cur()), 1u), last_num_components);
550 set_lig_props_for_mark (c->buffer->cur(), lig_id, new_lig_comp);
552 c->buffer->next_glyph ();
555 last_lig_id = get_lig_id (c->buffer->cur());
556 last_num_components = get_lig_num_comps (c->buffer->cur());
557 components_so_far += last_num_components;
559 /* Skip the base glyph */
560 c->buffer->idx++;
563 if (!is_mark_ligature && last_lig_id) {
564 /* Re-adjust components for any marks following. */
565 for (unsigned int i = c->buffer->idx; i < c->buffer->len; i++) {
566 if (last_lig_id == get_lig_id (c->buffer->info[i])) {
567 unsigned int new_lig_comp = components_so_far - last_num_components +
568 MIN (MAX (get_lig_comp (c->buffer->info[i]), 1u), last_num_components);
569 set_lig_props_for_mark (c->buffer->info[i], lig_id, new_lig_comp);
570 } else
571 break;
/* Match a chain-context backtrack sequence against the already-output glyphs
 * (out_info), walking backward and skipping ignorable marks.  Uses
 * context-match mode: any mask/syllable is accepted. */
576 static inline bool match_backtrack (hb_apply_context_t *c,
577 unsigned int count,
578 const USHORT backtrack[],
579 match_func_t match_func,
580 const void *match_data)
582 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", NULL, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
584 hb_apply_context_t::mark_skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
585 if (skippy_iter.has_no_chance ())
586 return TRACE_RETURN (false);
588 for (unsigned int i = 0; i < count; i++)
590 if (!skippy_iter.prev ())
591 return TRACE_RETURN (false);
593 if (likely (!match_func (c->buffer->out_info[skippy_iter.idx].codepoint, backtrack[i], match_data)))
594 return TRACE_RETURN (false);
597 return TRACE_RETURN (true);
/* Match a chain-context lookahead sequence against not-yet-processed glyphs,
 * starting `offset` positions past the current glyph (offset is the matched
 * input length).  Context-match mode, as in match_backtrack. */
600 static inline bool match_lookahead (hb_apply_context_t *c,
601 unsigned int count,
602 const USHORT lookahead[],
603 match_func_t match_func,
604 const void *match_data,
605 unsigned int offset)
607 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", NULL, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
609 hb_apply_context_t::mark_skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
610 if (skippy_iter.has_no_chance ())
611 return TRACE_RETURN (false);
613 for (unsigned int i = 0; i < count; i++)
615 if (!skippy_iter.next ())
616 return TRACE_RETURN (false);
618 if (likely (!match_func (c->buffer->info[skippy_iter.idx].codepoint, lookahead[i], match_data)))
619 return TRACE_RETURN (false);
622 return TRACE_RETURN (true);
/* On-disk record pairing a position in the matched sequence with the lookup
 * to apply there (OpenType SequenceLookupRecord). */
627 struct LookupRecord
629 inline bool sanitize (hb_sanitize_context_t *c) {
630 TRACE_SANITIZE ();
631 return TRACE_RETURN (c->check_struct (this));
634 USHORT sequenceIndex; /* Index into current glyph
635 * sequence--first glyph = 0 */
636 USHORT lookupListIndex; /* Lookup to apply to that
637 * position--zero--based */
638 public:
639 DEFINE_SIZE_STATIC (4);
643 static inline void closure_lookup (hb_closure_context_t *c,
644 unsigned int lookupCount,
645 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
646 closure_lookup_func_t closure_func)
648 for (unsigned int i = 0; i < lookupCount; i++)
649 closure_func (c, lookupRecord->lookupListIndex);
/* Apply the per-position lookups of a matched (chain)context rule.  Walks the
 * matched span glyph by glyph; when position i has a LookupRecord, applies
 * that lookup and accounts for any glyphs it consumed/produced.  The
 * lookupRecord pointer itself is advanced as records are consumed, and the
 * statement order here is load-bearing — do not reorder. */
652 static inline bool apply_lookup (hb_apply_context_t *c,
653 unsigned int count, /* Including the first glyph */
654 unsigned int lookupCount,
655 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
656 apply_lookup_func_t apply_func)
658 hb_auto_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", NULL, HB_FUNC, "idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
659 unsigned int end = c->buffer->len;
660 if (unlikely (count == 0 || c->buffer->idx + count > end))
661 return TRACE_RETURN (false);
663 /* TODO We don't support lookupRecord arrays that are not increasing:
664 * Should be easy for in_place ones at least. */
666 /* Note: If sublookup is reverse, it will underflow after the first loop
667 * and we jump out of it. Not entirely disastrous. So we don't check
668 * for reverse lookup here.
670 for (unsigned int i = 0; i < count; /* NOP */)
672 if (unlikely (c->buffer->idx == end))
673 return TRACE_RETURN (true);
674 while (c->should_mark_skip_current_glyph ())
676 /* No lookup applied for this index */
677 c->buffer->next_glyph ();
678 if (unlikely (c->buffer->idx == end))
679 return TRACE_RETURN (true);
682 if (lookupCount && i == lookupRecord->sequenceIndex)
684 unsigned int old_pos = c->buffer->idx;
686 /* Apply a lookup */
687 bool done = apply_func (c, lookupRecord->lookupListIndex);
689 lookupRecord++;
690 lookupCount--;
691 /* Err, this is wrong if the lookup jumped over some glyphs */
692 i += c->buffer->idx - old_pos;
693 if (unlikely (c->buffer->idx == end))
694 return TRACE_RETURN (true);
696 if (!done)
697 goto not_applied;
699 else
701 not_applied:
702 /* No lookup applied for this index */
703 c->buffer->next_glyph ();
704 i++;
708 return TRACE_RETURN (true);
713 /* Contextual lookups */
/* Bundles the format-specific closure callbacks with their opaque data
 * (e.g. the ClassDef for format 2, the subtable base for format 3). */
715 struct ContextClosureLookupContext
717 ContextClosureFuncs funcs;
718 const void *intersects_data;
/* Bundles the format-specific match/apply callbacks with their opaque data. */
721 struct ContextApplyLookupContext
723 ContextApplyFuncs funcs;
724 const void *match_data;
/* Closure for one contextual rule: if every input position can intersect the
 * current glyph set, recurse into the rule's lookups. */
727 static inline void context_closure_lookup (hb_closure_context_t *c,
728 unsigned int inputCount, /* Including the first glyph (not matched) */
729 const USHORT input[], /* Array of input values--start with second glyph */
730 unsigned int lookupCount,
731 const LookupRecord lookupRecord[],
732 ContextClosureLookupContext &lookup_context)
734 if (intersects_array (c,
735 inputCount ? inputCount - 1 : 0, input,
736 lookup_context.funcs.intersects, lookup_context.intersects_data))
737 closure_lookup (c,
738 lookupCount, lookupRecord,
739 lookup_context.funcs.closure);
/* would_apply for one contextual rule: input matching only, no lookups run. */
743 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
744 unsigned int inputCount, /* Including the first glyph (not matched) */
745 const USHORT input[], /* Array of input values--start with second glyph */
746 unsigned int lookupCount,
747 const LookupRecord lookupRecord[],
748 ContextApplyLookupContext &lookup_context)
750 return would_match_input (c,
751 inputCount, input,
752 lookup_context.funcs.match, lookup_context.match_data)
/* Apply one contextual rule: match the input sequence, then run the rule's
 * per-position lookups over the matched span. */
754 static inline bool context_apply_lookup (hb_apply_context_t *c,
755 unsigned int inputCount, /* Including the first glyph (not matched) */
756 const USHORT input[], /* Array of input values--start with second glyph */
757 unsigned int lookupCount,
758 const LookupRecord lookupRecord[],
759 ContextApplyLookupContext &lookup_context)
761 return match_input (c,
762 inputCount, input,
763 lookup_context.funcs.match, lookup_context.match_data)
764 && apply_lookup (c,
765 inputCount,
766 lookupCount, lookupRecord,
767 lookup_context.funcs.apply);
/* One contextual rule (formats 1 and 2): an input sequence followed in memory
 * by its LookupRecord array.  The lookupRecord array starts right after the
 * inputCount-1 input values, hence the StructAtOffset arithmetic below. */
770 struct Rule
772 friend struct RuleSet;
774 private:
776 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
778 TRACE_CLOSURE ();
779 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
780 context_closure_lookup (c,
781 inputCount, input,
782 lookupCount, lookupRecord,
783 lookup_context);
786 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
788 TRACE_WOULD_APPLY ();
789 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
790 return TRACE_RETURN (context_would_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
793 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
795 TRACE_APPLY ();
796 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
797 return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
800 public:
801 inline bool sanitize (hb_sanitize_context_t *c) {
802 TRACE_SANITIZE ();
803 return inputCount.sanitize (c)
804 && lookupCount.sanitize (c)
805 && c->check_range (input,
806 input[0].static_size * inputCount
807 + lookupRecordX[0].static_size * lookupCount);
810 protected:
811 USHORT inputCount; /* Total number of glyphs in input
812 * glyph sequence--includes the first
813 * glyph */
814 USHORT lookupCount; /* Number of LookupRecords */
815 USHORT input[VAR]; /* Array of match inputs--start with
816 * second glyph */
817 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
818 * design order */
819 public:
820 DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
/* A set of Rules sharing the same first glyph/class; tried in order and the
 * first rule that applies wins. */
823 struct RuleSet
825 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
827 TRACE_CLOSURE ();
828 unsigned int num_rules = rule.len;
829 for (unsigned int i = 0; i < num_rules; i++)
830 (this+rule[i]).closure (c, lookup_context);
833 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
835 TRACE_WOULD_APPLY ();
836 unsigned int num_rules = rule.len;
837 for (unsigned int i = 0; i < num_rules; i++)
839 if ((this+rule[i]).would_apply (c, lookup_context))
840 return TRACE_RETURN (true);
842 return TRACE_RETURN (false);
845 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
847 TRACE_APPLY ();
848 unsigned int num_rules = rule.len;
849 for (unsigned int i = 0; i < num_rules; i++)
851 if ((this+rule[i]).apply (c, lookup_context))
852 return TRACE_RETURN (true);
854 return TRACE_RETURN (false);
857 inline bool sanitize (hb_sanitize_context_t *c) {
858 TRACE_SANITIZE ();
859 return TRACE_RETURN (rule.sanitize (c, this));
862 protected:
863 OffsetArrayOf<Rule>
864 rule; /* Array of Rule tables
865 * ordered by preference */
866 public:
867 DEFINE_SIZE_ARRAY (2, rule);
/* Context Substitution/Positioning Format 1: simple glyph contexts.
 * Coverage maps the first glyph to a RuleSet; rules match literal glyph ids. */
871 struct ContextFormat1
873 friend struct Context;
875 private:
877 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
879 TRACE_CLOSURE ();
881 const Coverage &cov = (this+coverage);
883 struct ContextClosureLookupContext lookup_context = {
884 {intersects_glyph, closure_func},
885 NULL
888 unsigned int count = ruleSet.len;
889 for (unsigned int i = 0; i < count; i++)
890 if (cov.intersects_coverage (c->glyphs, i)) {
891 const RuleSet &rule_set = this+ruleSet[i];
892 rule_set.closure (c, lookup_context);
896 inline bool would_apply (hb_would_apply_context_t *c) const
898 TRACE_WOULD_APPLY ();
900 const RuleSet &rule_set = this+ruleSet[(this+coverage) (c->glyphs[0])];
901 struct ContextApplyLookupContext lookup_context = {
/* No apply callback: would_apply only tests input matching. */
902 {match_glyph, NULL},
903 NULL
905 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
908 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
910 TRACE_APPLY ();
911 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
912 if (likely (index == NOT_COVERED))
913 return TRACE_RETURN (false);
915 const RuleSet &rule_set = this+ruleSet[index];
916 struct ContextApplyLookupContext lookup_context = {
917 {match_glyph, apply_func},
918 NULL
920 return TRACE_RETURN (rule_set.apply (c, lookup_context));
923 inline bool sanitize (hb_sanitize_context_t *c) {
924 TRACE_SANITIZE ();
925 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
928 protected:
929 USHORT format; /* Format identifier--format = 1 */
930 OffsetTo<Coverage>
931 coverage; /* Offset to Coverage table--from
932 * beginning of table */
933 OffsetArrayOf<RuleSet>
934 ruleSet; /* Array of RuleSet tables
935 * ordered by Coverage Index */
936 public:
937 DEFINE_SIZE_ARRAY (6, ruleSet);
/* Context Format 2: class-based contexts.  Coverage gates the first glyph;
 * a ClassDef maps glyphs to classes and rules match class values. */
941 struct ContextFormat2
943 friend struct Context;
945 private:
947 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
949 TRACE_CLOSURE ();
950 if (!(this+coverage).intersects (c->glyphs))
951 return;
953 const ClassDef &class_def = this+classDef;
955 struct ContextClosureLookupContext lookup_context = {
956 {intersects_class, closure_func},
957 NULL
960 unsigned int count = ruleSet.len;
961 for (unsigned int i = 0; i < count; i++)
962 if (class_def.intersects_class (c->glyphs, i)) {
963 const RuleSet &rule_set = this+ruleSet[i];
964 rule_set.closure (c, lookup_context);
968 inline bool would_apply (hb_would_apply_context_t *c) const
970 TRACE_WOULD_APPLY ();
972 const ClassDef &class_def = this+classDef;
973 unsigned int index = class_def (c->glyphs[0]);
974 const RuleSet &rule_set = this+ruleSet[index];
975 struct ContextApplyLookupContext lookup_context = {
976 {match_class, NULL},
977 &class_def
979 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
982 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
984 TRACE_APPLY ();
985 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
986 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
988 const ClassDef &class_def = this+classDef;
/* Re-use `index` as the glyph's class to pick the RuleSet. */
989 index = class_def (c->buffer->cur().codepoint);
990 const RuleSet &rule_set = this+ruleSet[index];
991 struct ContextApplyLookupContext lookup_context = {
992 {match_class, apply_func},
993 &class_def
995 return TRACE_RETURN (rule_set.apply (c, lookup_context));
998 inline bool sanitize (hb_sanitize_context_t *c) {
999 TRACE_SANITIZE ();
1000 return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
1003 protected:
1004 USHORT format; /* Format identifier--format = 2 */
1005 OffsetTo<Coverage>
1006 coverage; /* Offset to Coverage table--from
1007 * beginning of table */
1008 OffsetTo<ClassDef>
1009 classDef; /* Offset to glyph ClassDef table--from
1010 * beginning of table */
1011 OffsetArrayOf<RuleSet>
1012 ruleSet; /* Array of RuleSet tables
1013 * ordered by class */
1014 public:
1015 DEFINE_SIZE_ARRAY (8, ruleSet);
/* Context Format 3: coverage-based contexts.  One Coverage table per input
 * position; the LookupRecord array follows the glyphCount coverage offsets.
 * NOTE(review): this struct is truncated in this chunk — its member
 * declarations (format, glyphCount, lookupCount, coverage[], lookupRecordX[])
 * continue past the visible text. */
1019 struct ContextFormat3
1021 friend struct Context;
1023 private:
1025 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1027 TRACE_CLOSURE ();
1028 if (!(this+coverage[0]).intersects (c->glyphs))
1029 return;
1031 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
1032 struct ContextClosureLookupContext lookup_context = {
1033 {intersects_coverage, closure_func},
1034 this
/* coverage + 1: the input array starts at the second coverage offset. */
1036 context_closure_lookup (c,
1037 glyphCount, (const USHORT *) (coverage + 1),
1038 lookupCount, lookupRecord,
1039 lookup_context);
1042 inline bool would_apply (hb_would_apply_context_t *c) const
1044 TRACE_WOULD_APPLY ();
1046 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
1047 struct ContextApplyLookupContext lookup_context = {
1048 {match_coverage, NULL},
1049 this
1051 return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
1054 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1056 TRACE_APPLY ();
1057 unsigned int index = (this+coverage[0]) (c->buffer->cur().codepoint);
1058 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1060 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
1061 struct ContextApplyLookupContext lookup_context = {
1062 {match_coverage, apply_func},
1063 this
1065 return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
1068 inline bool sanitize (hb_sanitize_context_t *c) {
1069 TRACE_SANITIZE ();
1070 if (!c->check_struct (this)) return TRACE_RETURN (false);
1071 unsigned int count = glyphCount;
1072 if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
1073 for (unsigned int i = 0; i < count; i++)
1074 if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
1075 LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
1076 return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
1079 protected:
1080 USHORT format; /* Format identifier--format = 3 */
1081 USHORT glyphCount; /* Number of glyphs in the input glyph
1082 * sequence */
1083 USHORT lookupCount; /* Number of LookupRecords */
1084 OffsetTo<Coverage>
1085 coverage[VAR]; /* Array of offsets to Coverage
1086 * table in glyph sequence order */
1087 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
1088 * design order */
1089 public:
1090 DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
1093 struct Context
1095 protected:
1097 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1099 TRACE_CLOSURE ();
1100 switch (u.format) {
1101 case 1: u.format1.closure (c, closure_func); break;
1102 case 2: u.format2.closure (c, closure_func); break;
1103 case 3: u.format3.closure (c, closure_func); break;
1104 default: break;
1108 inline const Coverage &get_coverage (void) const
1110 switch (u.format) {
1111 case 1: return this + u.format1.coverage;
1112 case 2: return this + u.format2.coverage;
1113 case 3: return this + u.format3.coverage[0];
1114 default:return Null(Coverage);
1118 inline bool would_apply (hb_would_apply_context_t *c) const
1120 switch (u.format) {
1121 case 1: return u.format1.would_apply (c);
1122 case 2: return u.format2.would_apply (c);
1123 case 3: return u.format3.would_apply (c);
1124 default:return false;
1128 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1130 TRACE_APPLY ();
1131 switch (u.format) {
1132 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
1133 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
1134 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
1135 default:return TRACE_RETURN (false);
1139 inline bool sanitize (hb_sanitize_context_t *c) {
1140 TRACE_SANITIZE ();
1141 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1142 switch (u.format) {
1143 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1144 case 2: return TRACE_RETURN (u.format2.sanitize (c));
1145 case 3: return TRACE_RETURN (u.format3.sanitize (c));
1146 default:return TRACE_RETURN (true);
1150 protected:
1151 union {
1152 USHORT format; /* Format identifier */
1153 ContextFormat1 format1;
1154 ContextFormat2 format2;
1155 ContextFormat3 format3;
1156 } u;
1160 /* Chaining Contextual lookups */
1162 struct ChainContextClosureLookupContext
1164 ContextClosureFuncs funcs;
1165 const void *intersects_data[3];
1168 struct ChainContextApplyLookupContext
1170 ContextApplyFuncs funcs;
1171 const void *match_data[3];
1174 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
1175 unsigned int backtrackCount,
1176 const USHORT backtrack[],
1177 unsigned int inputCount, /* Including the first glyph (not matched) */
1178 const USHORT input[], /* Array of input values--start with second glyph */
1179 unsigned int lookaheadCount,
1180 const USHORT lookahead[],
1181 unsigned int lookupCount,
1182 const LookupRecord lookupRecord[],
1183 ChainContextClosureLookupContext &lookup_context)
1185 if (intersects_array (c,
1186 backtrackCount, backtrack,
1187 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
1188 && intersects_array (c,
1189 inputCount ? inputCount - 1 : 0, input,
1190 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
1191 && intersects_array (c,
1192 lookaheadCount, lookahead,
1193 lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
1194 closure_lookup (c,
1195 lookupCount, lookupRecord,
1196 lookup_context.funcs.closure);
1199 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
1200 unsigned int backtrackCount,
1201 const USHORT backtrack[],
1202 unsigned int inputCount, /* Including the first glyph (not matched) */
1203 const USHORT input[], /* Array of input values--start with second glyph */
1204 unsigned int lookaheadCount,
1205 const USHORT lookahead[],
1206 unsigned int lookupCount,
1207 const LookupRecord lookupRecord[],
1208 ChainContextApplyLookupContext &lookup_context)
1210 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
1211 && would_match_input (c,
1212 inputCount, input,
1213 lookup_context.funcs.match, lookup_context.match_data[1]);
1216 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
1217 unsigned int backtrackCount,
1218 const USHORT backtrack[],
1219 unsigned int inputCount, /* Including the first glyph (not matched) */
1220 const USHORT input[], /* Array of input values--start with second glyph */
1221 unsigned int lookaheadCount,
1222 const USHORT lookahead[],
1223 unsigned int lookupCount,
1224 const LookupRecord lookupRecord[],
1225 ChainContextApplyLookupContext &lookup_context)
1227 unsigned int lookahead_offset;
1228 return match_input (c,
1229 inputCount, input,
1230 lookup_context.funcs.match, lookup_context.match_data[1],
1231 &lookahead_offset)
1232 && match_backtrack (c,
1233 backtrackCount, backtrack,
1234 lookup_context.funcs.match, lookup_context.match_data[0])
1235 && match_lookahead (c,
1236 lookaheadCount, lookahead,
1237 lookup_context.funcs.match, lookup_context.match_data[2],
1238 lookahead_offset)
1239 && apply_lookup (c,
1240 inputCount,
1241 lookupCount, lookupRecord,
1242 lookup_context.funcs.apply);
1245 struct ChainRule
1247 friend struct ChainRuleSet;
1249 private:
1251 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1253 TRACE_CLOSURE ();
1254 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1255 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1256 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1257 chain_context_closure_lookup (c,
1258 backtrack.len, backtrack.array,
1259 input.len, input.array,
1260 lookahead.len, lookahead.array,
1261 lookup.len, lookup.array,
1262 lookup_context);
1265 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1267 TRACE_WOULD_APPLY ();
1268 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1269 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1270 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1271 return TRACE_RETURN (chain_context_would_apply_lookup (c,
1272 backtrack.len, backtrack.array,
1273 input.len, input.array,
1274 lookahead.len, lookahead.array, lookup.len,
1275 lookup.array, lookup_context));
1278 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1280 TRACE_APPLY ();
1281 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1282 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1283 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1284 return TRACE_RETURN (chain_context_apply_lookup (c,
1285 backtrack.len, backtrack.array,
1286 input.len, input.array,
1287 lookahead.len, lookahead.array, lookup.len,
1288 lookup.array, lookup_context));
1291 public:
1292 inline bool sanitize (hb_sanitize_context_t *c) {
1293 TRACE_SANITIZE ();
1294 if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
1295 HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
1296 if (!input.sanitize (c)) return TRACE_RETURN (false);
1297 ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
1298 if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
1299 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1300 return TRACE_RETURN (lookup.sanitize (c));
1303 protected:
1304 ArrayOf<USHORT>
1305 backtrack; /* Array of backtracking values
1306 * (to be matched before the input
1307 * sequence) */
1308 HeadlessArrayOf<USHORT>
1309 inputX; /* Array of input values (start with
1310 * second glyph) */
1311 ArrayOf<USHORT>
1312 lookaheadX; /* Array of lookahead values's (to be
1313 * matched after the input sequence) */
1314 ArrayOf<LookupRecord>
1315 lookupX; /* Array of LookupRecords--in
1316 * design order) */
1317 public:
1318 DEFINE_SIZE_MIN (8);
1321 struct ChainRuleSet
1323 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
1325 TRACE_CLOSURE ();
1326 unsigned int num_rules = rule.len;
1327 for (unsigned int i = 0; i < num_rules; i++)
1328 (this+rule[i]).closure (c, lookup_context);
1331 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1333 TRACE_WOULD_APPLY ();
1334 unsigned int num_rules = rule.len;
1335 for (unsigned int i = 0; i < num_rules; i++)
1336 if ((this+rule[i]).would_apply (c, lookup_context))
1337 return TRACE_RETURN (true);
1339 return TRACE_RETURN (false);
1342 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
1344 TRACE_APPLY ();
1345 unsigned int num_rules = rule.len;
1346 for (unsigned int i = 0; i < num_rules; i++)
1347 if ((this+rule[i]).apply (c, lookup_context))
1348 return TRACE_RETURN (true);
1350 return TRACE_RETURN (false);
1353 inline bool sanitize (hb_sanitize_context_t *c) {
1354 TRACE_SANITIZE ();
1355 return TRACE_RETURN (rule.sanitize (c, this));
1358 protected:
1359 OffsetArrayOf<ChainRule>
1360 rule; /* Array of ChainRule tables
1361 * ordered by preference */
1362 public:
1363 DEFINE_SIZE_ARRAY (2, rule);
1366 struct ChainContextFormat1
1368 friend struct ChainContext;
1370 private:
1372 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1374 TRACE_CLOSURE ();
1375 const Coverage &cov = (this+coverage);
1377 struct ChainContextClosureLookupContext lookup_context = {
1378 {intersects_glyph, closure_func},
1379 {NULL, NULL, NULL}
1382 unsigned int count = ruleSet.len;
1383 for (unsigned int i = 0; i < count; i++)
1384 if (cov.intersects_coverage (c->glyphs, i)) {
1385 const ChainRuleSet &rule_set = this+ruleSet[i];
1386 rule_set.closure (c, lookup_context);
1390 inline bool would_apply (hb_would_apply_context_t *c) const
1392 TRACE_WOULD_APPLY ();
1394 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage) (c->glyphs[0])];
1395 struct ChainContextApplyLookupContext lookup_context = {
1396 {match_glyph, NULL},
1397 {NULL, NULL, NULL}
1399 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1402 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1404 TRACE_APPLY ();
1405 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1406 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1408 const ChainRuleSet &rule_set = this+ruleSet[index];
1409 struct ChainContextApplyLookupContext lookup_context = {
1410 {match_glyph, apply_func},
1411 {NULL, NULL, NULL}
1413 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1416 inline bool sanitize (hb_sanitize_context_t *c) {
1417 TRACE_SANITIZE ();
1418 return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1421 protected:
1422 USHORT format; /* Format identifier--format = 1 */
1423 OffsetTo<Coverage>
1424 coverage; /* Offset to Coverage table--from
1425 * beginning of table */
1426 OffsetArrayOf<ChainRuleSet>
1427 ruleSet; /* Array of ChainRuleSet tables
1428 * ordered by Coverage Index */
1429 public:
1430 DEFINE_SIZE_ARRAY (6, ruleSet);
1433 struct ChainContextFormat2
1435 friend struct ChainContext;
1437 private:
1439 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1441 TRACE_CLOSURE ();
1442 if (!(this+coverage).intersects (c->glyphs))
1443 return;
1445 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1446 const ClassDef &input_class_def = this+inputClassDef;
1447 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1449 struct ChainContextClosureLookupContext lookup_context = {
1450 {intersects_class, closure_func},
1451 {&backtrack_class_def,
1452 &input_class_def,
1453 &lookahead_class_def}
1456 unsigned int count = ruleSet.len;
1457 for (unsigned int i = 0; i < count; i++)
1458 if (input_class_def.intersects_class (c->glyphs, i)) {
1459 const ChainRuleSet &rule_set = this+ruleSet[i];
1460 rule_set.closure (c, lookup_context);
1464 inline bool would_apply (hb_would_apply_context_t *c) const
1466 TRACE_WOULD_APPLY ();
1468 const ClassDef &input_class_def = this+inputClassDef;
1470 unsigned int index = input_class_def (c->glyphs[0]);
1471 const ChainRuleSet &rule_set = this+ruleSet[index];
1472 struct ChainContextApplyLookupContext lookup_context = {
1473 {match_class, NULL},
1474 {NULL, &input_class_def, NULL}
1476 return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
1479 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1481 TRACE_APPLY ();
1482 unsigned int index = (this+coverage) (c->buffer->cur().codepoint);
1483 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1485 const ClassDef &backtrack_class_def = this+backtrackClassDef;
1486 const ClassDef &input_class_def = this+inputClassDef;
1487 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
1489 index = input_class_def (c->buffer->cur().codepoint);
1490 const ChainRuleSet &rule_set = this+ruleSet[index];
1491 struct ChainContextApplyLookupContext lookup_context = {
1492 {match_class, apply_func},
1493 {&backtrack_class_def,
1494 &input_class_def,
1495 &lookahead_class_def}
1497 return TRACE_RETURN (rule_set.apply (c, lookup_context));
1500 inline bool sanitize (hb_sanitize_context_t *c) {
1501 TRACE_SANITIZE ();
1502 return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
1503 inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
1504 ruleSet.sanitize (c, this));
1507 protected:
1508 USHORT format; /* Format identifier--format = 2 */
1509 OffsetTo<Coverage>
1510 coverage; /* Offset to Coverage table--from
1511 * beginning of table */
1512 OffsetTo<ClassDef>
1513 backtrackClassDef; /* Offset to glyph ClassDef table
1514 * containing backtrack sequence
1515 * data--from beginning of table */
1516 OffsetTo<ClassDef>
1517 inputClassDef; /* Offset to glyph ClassDef
1518 * table containing input sequence
1519 * data--from beginning of table */
1520 OffsetTo<ClassDef>
1521 lookaheadClassDef; /* Offset to glyph ClassDef table
1522 * containing lookahead sequence
1523 * data--from beginning of table */
1524 OffsetArrayOf<ChainRuleSet>
1525 ruleSet; /* Array of ChainRuleSet tables
1526 * ordered by class */
1527 public:
1528 DEFINE_SIZE_ARRAY (12, ruleSet);
1531 struct ChainContextFormat3
1533 friend struct ChainContext;
1535 private:
1537 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1539 TRACE_CLOSURE ();
1540 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1542 if (!(this+input[0]).intersects (c->glyphs))
1543 return;
1545 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1546 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1547 struct ChainContextClosureLookupContext lookup_context = {
1548 {intersects_coverage, closure_func},
1549 {this, this, this}
1551 chain_context_closure_lookup (c,
1552 backtrack.len, (const USHORT *) backtrack.array,
1553 input.len, (const USHORT *) input.array + 1,
1554 lookahead.len, (const USHORT *) lookahead.array,
1555 lookup.len, lookup.array,
1556 lookup_context);
1559 inline const Coverage &get_coverage (void) const
1561 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1562 return this+input[0];
1565 inline bool would_apply (hb_would_apply_context_t *c) const
1567 TRACE_WOULD_APPLY ();
1569 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1570 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1571 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1572 struct ChainContextApplyLookupContext lookup_context = {
1573 {match_coverage, NULL},
1574 {this, this, this}
1576 return TRACE_RETURN (chain_context_would_apply_lookup (c,
1577 backtrack.len, (const USHORT *) backtrack.array,
1578 input.len, (const USHORT *) input.array + 1,
1579 lookahead.len, (const USHORT *) lookahead.array,
1580 lookup.len, lookup.array, lookup_context));
1583 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1585 TRACE_APPLY ();
1586 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1588 unsigned int index = (this+input[0]) (c->buffer->cur().codepoint);
1589 if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
1591 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1592 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1593 struct ChainContextApplyLookupContext lookup_context = {
1594 {match_coverage, apply_func},
1595 {this, this, this}
1597 return TRACE_RETURN (chain_context_apply_lookup (c,
1598 backtrack.len, (const USHORT *) backtrack.array,
1599 input.len, (const USHORT *) input.array + 1,
1600 lookahead.len, (const USHORT *) lookahead.array,
1601 lookup.len, lookup.array, lookup_context));
1604 inline bool sanitize (hb_sanitize_context_t *c) {
1605 TRACE_SANITIZE ();
1606 if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
1607 OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
1608 if (!input.sanitize (c, this)) return TRACE_RETURN (false);
1609 OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
1610 if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
1611 ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
1612 return TRACE_RETURN (lookup.sanitize (c));
1615 protected:
1616 USHORT format; /* Format identifier--format = 3 */
1617 OffsetArrayOf<Coverage>
1618 backtrack; /* Array of coverage tables
1619 * in backtracking sequence, in glyph
1620 * sequence order */
1621 OffsetArrayOf<Coverage>
1622 inputX ; /* Array of coverage
1623 * tables in input sequence, in glyph
1624 * sequence order */
1625 OffsetArrayOf<Coverage>
1626 lookaheadX; /* Array of coverage tables
1627 * in lookahead sequence, in glyph
1628 * sequence order */
1629 ArrayOf<LookupRecord>
1630 lookupX; /* Array of LookupRecords--in
1631 * design order) */
1632 public:
1633 DEFINE_SIZE_MIN (10);
1636 struct ChainContext
1638 protected:
1640 inline void closure (hb_closure_context_t *c, closure_lookup_func_t closure_func) const
1642 TRACE_CLOSURE ();
1643 switch (u.format) {
1644 case 1: u.format1.closure (c, closure_func); break;
1645 case 2: u.format2.closure (c, closure_func); break;
1646 case 3: u.format3.closure (c, closure_func); break;
1647 default: break;
1651 inline const Coverage &get_coverage (void) const
1653 switch (u.format) {
1654 case 1: return this + u.format1.coverage;
1655 case 2: return this + u.format2.coverage;
1656 case 3: return u.format3.get_coverage ();
1657 default:return Null(Coverage);
1661 inline bool would_apply (hb_would_apply_context_t *c) const
1663 switch (u.format) {
1664 case 1: return u.format1.would_apply (c);
1665 case 2: return u.format2.would_apply (c);
1666 case 3: return u.format3.would_apply (c);
1667 default:return false;
1671 inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
1673 TRACE_APPLY ();
1674 switch (u.format) {
1675 case 1: return TRACE_RETURN (u.format1.apply (c, apply_func));
1676 case 2: return TRACE_RETURN (u.format2.apply (c, apply_func));
1677 case 3: return TRACE_RETURN (u.format3.apply (c, apply_func));
1678 default:return TRACE_RETURN (false);
1682 inline bool sanitize (hb_sanitize_context_t *c) {
1683 TRACE_SANITIZE ();
1684 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1685 switch (u.format) {
1686 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1687 case 2: return TRACE_RETURN (u.format2.sanitize (c));
1688 case 3: return TRACE_RETURN (u.format3.sanitize (c));
1689 default:return TRACE_RETURN (true);
1693 protected:
1694 union {
1695 USHORT format; /* Format identifier */
1696 ChainContextFormat1 format1;
1697 ChainContextFormat2 format2;
1698 ChainContextFormat3 format3;
1699 } u;
1703 struct ExtensionFormat1
1705 friend struct Extension;
1707 protected:
1708 inline unsigned int get_type (void) const { return extensionLookupType; }
1709 inline unsigned int get_offset (void) const { return extensionOffset; }
1711 inline bool sanitize (hb_sanitize_context_t *c) {
1712 TRACE_SANITIZE ();
1713 return TRACE_RETURN (c->check_struct (this));
1716 protected:
1717 USHORT format; /* Format identifier. Set to 1. */
1718 USHORT extensionLookupType; /* Lookup type of subtable referenced
1719 * by ExtensionOffset (i.e. the
1720 * extension subtable). */
1721 ULONG extensionOffset; /* Offset to the extension subtable,
1722 * of lookup type subtable. */
1723 public:
1724 DEFINE_SIZE_STATIC (8);
1727 struct Extension
1729 inline unsigned int get_type (void) const
1731 switch (u.format) {
1732 case 1: return u.format1.get_type ();
1733 default:return 0;
1736 inline unsigned int get_offset (void) const
1738 switch (u.format) {
1739 case 1: return u.format1.get_offset ();
1740 default:return 0;
1744 inline bool sanitize (hb_sanitize_context_t *c) {
1745 TRACE_SANITIZE ();
1746 if (!u.format.sanitize (c)) return TRACE_RETURN (false);
1747 switch (u.format) {
1748 case 1: return TRACE_RETURN (u.format1.sanitize (c));
1749 default:return TRACE_RETURN (true);
1753 protected:
1754 union {
1755 USHORT format; /* Format identifier */
1756 ExtensionFormat1 format1;
1757 } u;
1762 * GSUB/GPOS Common
1765 struct GSUBGPOS
1767 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
1768 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
1770 inline unsigned int get_script_count (void) const
1771 { return (this+scriptList).len; }
1772 inline const Tag& get_script_tag (unsigned int i) const
1773 { return (this+scriptList).get_tag (i); }
1774 inline unsigned int get_script_tags (unsigned int start_offset,
1775 unsigned int *script_count /* IN/OUT */,
1776 hb_tag_t *script_tags /* OUT */) const
1777 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
1778 inline const Script& get_script (unsigned int i) const
1779 { return (this+scriptList)[i]; }
1780 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
1781 { return (this+scriptList).find_index (tag, index); }
1783 inline unsigned int get_feature_count (void) const
1784 { return (this+featureList).len; }
1785 inline const Tag& get_feature_tag (unsigned int i) const
1786 { return (this+featureList).get_tag (i); }
1787 inline unsigned int get_feature_tags (unsigned int start_offset,
1788 unsigned int *feature_count /* IN/OUT */,
1789 hb_tag_t *feature_tags /* OUT */) const
1790 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
1791 inline const Feature& get_feature (unsigned int i) const
1792 { return (this+featureList)[i]; }
1793 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
1794 { return (this+featureList).find_index (tag, index); }
1796 inline unsigned int get_lookup_count (void) const
1797 { return (this+lookupList).len; }
1798 inline const Lookup& get_lookup (unsigned int i) const
1799 { return (this+lookupList)[i]; }
1801 inline bool sanitize (hb_sanitize_context_t *c) {
1802 TRACE_SANITIZE ();
1803 return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
1804 scriptList.sanitize (c, this) &&
1805 featureList.sanitize (c, this) &&
1806 lookupList.sanitize (c, this));
1809 protected:
1810 FixedVersion version; /* Version of the GSUB/GPOS table--initially set
1811 * to 0x00010000 */
1812 OffsetTo<ScriptList>
1813 scriptList; /* ScriptList table */
1814 OffsetTo<FeatureList>
1815 featureList; /* FeatureList table */
1816 OffsetTo<LookupList>
1817 lookupList; /* LookupList table */
1818 public:
1819 DEFINE_SIZE_STATIC (10);
1823 } // namespace OT
1826 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */