/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012,2013  Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
#define HB_OT_LAYOUT_GPOS_TABLE_HH

#include "hb-ot-layout-gsubgpos-private.hh"


namespace OT {

/* buffer **position** var allocations */
#define attach_lookback() var.u16[0] /* number of glyphs to go back to attach this glyph to its base */
#define cursive_chain() var.i16[1] /* character to which this connects, may be positive or negative */
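/* Note: these aliases live in the glyph positions' var slots for the duration of
 * GPOS: GPOS::position_start() below zeroes them, GPOS::position_finish() resolves
 * them, and the macros are #undef'ed at the end of this file. */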

/* Shared Tables: ValueRecord, Anchor Table, and MarkArray */

typedef USHORT Value;

typedef Value ValueRecord[VAR];

struct ValueFormat : USHORT
{
  enum {
    xPlacement  = 0x0001u,      /* Includes horizontal adjustment for placement */
    yPlacement  = 0x0002u,      /* Includes vertical adjustment for placement */
    xAdvance    = 0x0004u,      /* Includes horizontal adjustment for advance */
    yAdvance    = 0x0008u,      /* Includes vertical adjustment for advance */
    xPlaDevice  = 0x0010u,      /* Includes horizontal Device table for placement */
    yPlaDevice  = 0x0020u,      /* Includes vertical Device table for placement */
    xAdvDevice  = 0x0040u,      /* Includes horizontal Device table for advance */
    yAdvDevice  = 0x0080u,      /* Includes vertical Device table for advance */
    ignored     = 0x0F00u,      /* Was used in TrueType Open for MM fonts */
    reserved    = 0xF000u,      /* For future use */

    devices     = 0x00F0u       /* Mask for having any Device table */
  };
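  /* For example, a ValueFormat of 0x0005 (xPlacement | xAdvance) selects two of
   * the optional fields documented below, so get_len () == 2 and
   * get_size () == 2 * Value::static_size. */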

/* All fields are optional.  Only those available advance the value pointer. */
#if 0
  SHORT         xPlacement;     /* Horizontal adjustment for
                                 * placement--in design units */
  SHORT         yPlacement;     /* Vertical adjustment for
                                 * placement--in design units */
  SHORT         xAdvance;       /* Horizontal adjustment for
                                 * advance--in design units (only used
                                 * for horizontal writing) */
  SHORT         yAdvance;       /* Vertical adjustment for advance--in
                                 * design units (only used for vertical
                                 * writing) */
  Offset        xPlaDevice;     /* Offset to Device table for
                                 * horizontal placement--measured from
                                 * beginning of PosTable (may be NULL) */
  Offset        yPlaDevice;     /* Offset to Device table for vertical
                                 * placement--measured from beginning
                                 * of PosTable (may be NULL) */
  Offset        xAdvDevice;     /* Offset to Device table for
                                 * horizontal advance--measured from
                                 * beginning of PosTable (may be NULL) */
  Offset        yAdvDevice;     /* Offset to Device table for vertical
                                 * advance--measured from beginning of
                                 * PosTable (may be NULL) */
#endif

  inline unsigned int get_len (void) const
  { return _hb_popcount32 ((unsigned int) *this); }
  inline unsigned int get_size (void) const
  { return get_len () * Value::static_size; }

  void apply_value (hb_font_t            *font,
                    hb_direction_t        direction,
                    const void           *base,
                    const Value          *values,
                    hb_glyph_position_t  &glyph_pos) const
  {
    unsigned int x_ppem, y_ppem;
    unsigned int format = *this;
    hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (direction);

    if (format & xPlacement) glyph_pos.x_offset  += font->em_scale_x (get_short (values++));
    if (format & yPlacement) glyph_pos.y_offset  += font->em_scale_y (get_short (values++));
    if (format & xAdvance) {
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values));
      values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values));
      values++;
    }

    if (!has_device ()) return;

    x_ppem = font->x_ppem;
    y_ppem = font->y_ppem;

    if (!x_ppem && !y_ppem) return;

    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (x_ppem) glyph_pos.x_offset  += (base + get_device (values)).get_x_delta (font);
      values++;
    }
    if (format & yPlaDevice) {
      if (y_ppem) glyph_pos.y_offset  += (base + get_device (values)).get_y_delta (font);
      values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && x_ppem) glyph_pos.x_advance += (base + get_device (values)).get_x_delta (font);
      values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && y_ppem) glyph_pos.y_advance -= (base + get_device (values)).get_y_delta (font);
      values++;
    }
  }
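  /* Note: the Value array is consumed in the fixed field order above; every field
   * present in the format advances the `values` pointer, even when the adjustment
   * itself is skipped (e.g. yAdvance in horizontal text), so the explicit values++
   * lines are needed to stay in sync with the record layout. */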

  private:
  inline bool sanitize_value_devices (hb_sanitize_context_t *c, void *base, Value *values) {
    unsigned int format = *this;

    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance)   values++;
    if (format & yAdvance)   values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }

  static inline OffsetTo<Device>& get_device (Value* value)
  { return *CastP<OffsetTo<Device> > (value); }
  static inline const OffsetTo<Device>& get_device (const Value* value)
  { return *CastP<OffsetTo<Device> > (value); }

  static inline const SHORT& get_short (const Value* value)
  { return *CastP<SHORT> (value); }

  public:

  inline bool has_device (void) const {
    unsigned int format = *this;
    return (format & devices) != 0;
  }

  inline bool sanitize_value (hb_sanitize_context_t *c, void *base, Value *values) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }

  inline bool sanitize_values (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count) {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += len;
    }

    return TRACE_RETURN (true);
  }

  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  inline bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count, unsigned int stride) {
    TRACE_SANITIZE (this);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += stride;
    }

    return TRACE_RETURN (true);
  }
};


struct AnchorFormat1
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  SHORT         xCoordinate;            /* Horizontal value--in design units */
  SHORT         yCoordinate;            /* Vertical value--in design units */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct AnchorFormat2
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    unsigned int x_ppem = font->x_ppem;
    unsigned int y_ppem = font->y_ppem;
    hb_position_t cx, cy;
    hb_bool_t ret;

    ret = (x_ppem || y_ppem) &&
          font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
    *x = ret && x_ppem ? cx : font->em_scale_x (xCoordinate);
    *y = ret && y_ppem ? cy : font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 2 */
  SHORT         xCoordinate;            /* Horizontal value--in design units */
  SHORT         yCoordinate;            /* Vertical value--in design units */
  USHORT        anchorPoint;            /* Index to glyph contour point */
  public:
  DEFINE_SIZE_STATIC (8);
};

struct AnchorFormat3
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);

    if (font->x_ppem)
      *x += (this+xDeviceTable).get_x_delta (font);
    if (font->y_ppem)
      *y += (this+yDeviceTable).get_y_delta (font);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 3 */
  SHORT         xCoordinate;            /* Horizontal value--in design units */
  SHORT         yCoordinate;            /* Vertical value--in design units */
  OffsetTo<Device>
                xDeviceTable;           /* Offset to Device table for X
                                         * coordinate--from beginning of
                                         * Anchor table (may be NULL) */
  OffsetTo<Device>
                yDeviceTable;           /* Offset to Device table for Y
                                         * coordinate--from beginning of
                                         * Anchor table (may be NULL) */
  public:
  DEFINE_SIZE_STATIC (10);
};

struct Anchor
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (font, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (font, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (font, glyph_id, x, y); return;
    default:                                             return;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  AnchorFormat1         format1;
  AnchorFormat2         format2;
  AnchorFormat3         format3;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};

struct AnchorMatrix
{
  inline const Anchor& get_anchor (unsigned int row, unsigned int col, unsigned int cols, bool *found) const {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
    *found = !matrixZ[row * cols + col].is_null ();
    return this+matrixZ[row * cols + col];
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrixZ, matrixZ[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrixZ[i].sanitize (c, this)) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  USHORT        rows;                   /* Number of rows */
  protected:
  OffsetTo<Anchor>
                matrixZ[VAR];           /* Matrix of offsets to Anchor tables--
                                         * from beginning of AnchorMatrix table */
  public:
  DEFINE_SIZE_ARRAY (2, matrixZ);
};

struct MarkRecord
{
  friend struct MarkArray;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  protected:
  USHORT        klass;                  /* Class defined for this mark */
  OffsetTo<Anchor>
                markAnchor;             /* Offset to Anchor table--from
                                         * beginning of MarkArray table */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct MarkArray : ArrayOf<MarkRecord>  /* Array of MarkRecords--in Coverage order */
{
  inline bool apply (hb_apply_context_t *c,
                     unsigned int mark_index, unsigned int glyph_index,
                     const AnchorMatrix &anchors, unsigned int class_count,
                     unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it. */
    if (unlikely (!found)) return TRACE_RETURN (false);

    hb_position_t mark_x, mark_y, base_x, base_y;

    mark_anchor.get_anchor (c->font, buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c->font, buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    hb_glyph_position_t &o = buffer->cur_pos();
    o.x_offset = base_x - mark_x;
    o.y_offset = base_y - mark_y;
    o.attach_lookback() = buffer->idx - glyph_pos;

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
  }
};


struct SinglePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             values, buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat;            /* Defines the types of data in the
                                         * ValueRecord */
  ValueRecord   values;                 /* Defines positioning
                                         * value(s)--applied to all glyphs in
                                         * the Coverage table */
  public:
  DEFINE_SIZE_ARRAY (6, values);
};

struct SinglePosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (unlikely (index >= valueCount)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             &values[index * valueFormat.get_len ()],
                             buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat;            /* Defines the types of data in the
                                         * ValueRecord */
  USHORT        valueCount;             /* Number of ValueRecords */
  ValueRecord   values;                 /* Array of ValueRecords--positioning
                                         * values applied to glyphs */
  public:
  DEFINE_SIZE_ARRAY (8, values);
};

struct SinglePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  SinglePosFormat1      format1;
  SinglePosFormat2      format2;
  } u;
};


struct PairValueRecord
{
  friend struct PairSet;

  protected:
  GlyphID       secondGlyph;            /* GlyphID of second glyph in the
                                         * pair--first glyph is listed in the
                                         * Coverage table */
  ValueRecord   values;                 /* Positioning data for the first glyph
                                         * followed by for second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, values);
};

struct PairSet
{
  friend struct PairPosFormat1;

  inline void collect_glyphs (hb_collect_glyphs_context_t *c,
                              const ValueFormat *valueFormats) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (arrayZ);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      c->input->add (record->secondGlyph);
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }
  }

  inline bool apply (hb_apply_context_t *c,
                     const ValueFormat *valueFormats,
                     unsigned int pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record_array = CastP<PairValueRecord> (arrayZ);
    unsigned int count = len;

    /* Hand-coded bsearch. */
    if (unlikely (!count))
      return TRACE_RETURN (false);
    hb_codepoint_t x = buffer->info[pos].codepoint;
    int min = 0, max = (int) count - 1;
    while (min <= max)
    {
      int mid = (min + max) / 2;
      const PairValueRecord *record = &StructAtOffset<PairValueRecord> (record_array, record_size * mid);
      hb_codepoint_t mid_x = record->secondGlyph;
      if (x < mid_x)
        max = mid - 1;
      else if (x > mid_x)
        min = mid + 1;
      else
      {
        valueFormats[0].apply_value (c->font, c->direction, this,
                                     &record->values[0], buffer->cur_pos());
        valueFormats[1].apply_value (c->font, c->direction, this,
                                     &record->values[len1], buffer->pos[pos]);
        if (len2)
          pos++;
        buffer->idx = pos;
        return TRACE_RETURN (true);
      }
    }

    return TRACE_RETURN (false);
  }

  struct sanitize_closure_t {
    void *base;
    ValueFormat *valueFormats;
    unsigned int len1; /* valueFormats[0].get_len() */
    unsigned int stride; /* 1 + len1 + len2 */
  };

  inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && c->check_array (arrayZ, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);

    unsigned int count = len;
    PairValueRecord *record = CastP<PairValueRecord> (arrayZ);
    return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
                      && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
  }

  protected:
  USHORT        len;                    /* Number of PairValueRecords */
  USHORT        arrayZ[VAR];            /* Array of PairValueRecords--ordered
                                         * by GlyphID of the second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, arrayZ);
};

struct PairPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = pairSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+pairSet[i]).collect_glyphs (c, &valueFormat1);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    PairSet::sanitize_closure_t closure = {
      this,
      &valueFormat1,
      len1,
      1 + len1 + len2
    };

    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat1;           /* Defines the types of data in
                                         * ValueRecord1--for the first glyph
                                         * in the pair--may be zero (0) */
  ValueFormat   valueFormat2;           /* Defines the types of data in
                                         * ValueRecord2--for the second glyph
                                         * in the pair--may be zero (0) */
  OffsetArrayOf<PairSet>
                pairSet;                /* Array of PairSet tables
                                         * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (10, pairSet);
};

struct PairPosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    /* (this+coverage).add_coverage (c->input); // Don't need this. */

    unsigned int count1 = class1Count;
    const ClassDef &klass1 = this+classDef1;
    for (unsigned int i = 0; i < count1; i++)
      klass1.add_class (c->input, i);

    unsigned int count2 = class2Count;
    const ClassDef &klass2 = this+classDef2;
    for (unsigned int i = 0; i < count2; i++)
      klass2.add_class (c->input, i);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int record_len = len1 + len2;

    unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
    unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
    if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);

    const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
    valueFormat1.apply_value (c->font, c->direction, this,
                              v, buffer->cur_pos());
    valueFormat2.apply_value (c->font, c->direction, this,
                              v + len1, buffer->pos[skippy_iter.idx]);

    buffer->idx = skippy_iter.idx;
    if (len2)
      buffer->idx++;

    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && coverage.sanitize (c, this)
       && classDef1.sanitize (c, this)
       && classDef2.sanitize (c, this))) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int stride = len1 + len2;
    unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
    unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
    return TRACE_RETURN (c->check_array (values, record_size, count) &&
                         valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
                         valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat1;           /* ValueRecord definition--for the
                                         * first glyph of the pair--may be zero
                                         * (0) */
  ValueFormat   valueFormat2;           /* ValueRecord definition--for the
                                         * second glyph of the pair--may be
                                         * zero (0) */
  OffsetTo<ClassDef>
                classDef1;              /* Offset to ClassDef table--from
                                         * beginning of PairPos subtable--for
                                         * the first glyph of the pair */
  OffsetTo<ClassDef>
                classDef2;              /* Offset to ClassDef table--from
                                         * beginning of PairPos subtable--for
                                         * the second glyph of the pair */
  USHORT        class1Count;            /* Number of classes in ClassDef1
                                         * table--includes Class0 */
  USHORT        class2Count;            /* Number of classes in ClassDef2
                                         * table--includes Class0 */
  ValueRecord   values;                 /* Matrix of value pairs:
                                         * class1-major, class2-minor,
                                         * Each entry has value1 and value2 */
  public:
  DEFINE_SIZE_ARRAY (16, values);
};

struct PairPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  PairPosFormat1        format1;
  PairPosFormat2        format2;
  } u;
};


struct EntryExitRecord
{
  friend struct CursivePosFormat1;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
  }

  protected:
  OffsetTo<Anchor>
                entryAnchor;            /* Offset to EntryAnchor table--from
                                         * beginning of CursivePos
                                         * subtable--may be NULL */
  OffsetTo<Anchor>
                exitAnchor;             /* Offset to ExitAnchor table--from
                                         * beginning of CursivePos
                                         * subtable--may be NULL */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct CursivePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;

    /* We don't handle mark glyphs here. */
    if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return TRACE_RETURN (false);

    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
    if (!this_record.exitAnchor) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
    if (!next_record.entryAnchor) return TRACE_RETURN (false);

    unsigned int i = buffer->idx;
    unsigned int j = skippy_iter.idx;

    hb_position_t entry_x, entry_y, exit_x, exit_y;
    (this+this_record.exitAnchor).get_anchor (c->font, buffer->info[i].codepoint, &exit_x, &exit_y);
    (this+next_record.entryAnchor).get_anchor (c->font, buffer->info[j].codepoint, &entry_x, &entry_y);

    hb_glyph_position_t *pos = buffer->pos;

    hb_position_t d;
    /* Main-direction adjustment */
    switch (c->direction) {
      case HB_DIRECTION_LTR:
        pos[i].x_advance  = exit_x + pos[i].x_offset;

        d = entry_x + pos[j].x_offset;
        pos[j].x_advance -= d;
        pos[j].x_offset  -= d;
        break;
      case HB_DIRECTION_RTL:
        d = exit_x + pos[i].x_offset;
        pos[i].x_advance -= d;
        pos[i].x_offset  -= d;

        pos[j].x_advance  = entry_x + pos[j].x_offset;
        break;
      case HB_DIRECTION_TTB:
        pos[i].y_advance  = exit_y + pos[i].y_offset;

        d = entry_y + pos[j].y_offset;
        pos[j].y_advance -= d;
        pos[j].y_offset  -= d;
        break;
      case HB_DIRECTION_BTT:
        d = exit_y + pos[i].y_offset;
        pos[i].y_advance -= d;
        pos[i].y_offset  -= d;

        pos[j].y_advance  = entry_y;
        break;
      case HB_DIRECTION_INVALID:
      default:
        break;
    }

    /* Cross-direction adjustment */
    if (c->lookup_props & LookupFlag::RightToLeft) {
      pos[i].cursive_chain() = j - i;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[i].y_offset = entry_y - exit_y;
      else
        pos[i].x_offset = entry_x - exit_x;
    } else {
      pos[j].cursive_chain() = i - j;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[j].y_offset = exit_y - entry_y;
      else
        pos[j].x_offset = exit_x - entry_x;
    }

    buffer->idx = j;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ArrayOf<EntryExitRecord>
                entryExitRecord;        /* Array of EntryExit records--in
                                         * Coverage Index order */
  public:
  DEFINE_SIZE_ARRAY (6, entryExitRecord);
};
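/* Note: only the major-direction (advance) half of the cursive connection is
 * applied immediately above; the minor-direction offset is recorded via
 * cursive_chain() and resolved later by fix_cursive_minor_offset(), so that
 * chained connections accumulate in the right order once all lookups have run. */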

struct CursivePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  CursivePosFormat1     format1;
  } u;
};


typedef AnchorMatrix BaseArray;         /* base-major--
                                         * in order of BaseCoverage Index--,
                                         * mark-minor--
                                         * ordered by class--zero-based. */

struct MarkBasePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+baseCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    do {
      if (!skippy_iter.prev ()) return TRACE_RETURN (false);
      /* We only want to attach to the first of a MultipleSubst sequence.  Reject others. */
      if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break;
      skippy_iter.reject ();
    } while (1);

    /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
    if (base_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                markCoverage;           /* Offset to MarkCoverage table--from
                                         * beginning of MarkBasePos subtable */
  OffsetTo<Coverage>
                baseCoverage;           /* Offset to BaseCoverage table--from
                                         * beginning of MarkBasePos subtable */
  USHORT        classCount;             /* Number of classes defined for marks */
  OffsetTo<MarkArray>
                markArray;              /* Offset to MarkArray table--from
                                         * beginning of MarkBasePos subtable */
  OffsetTo<BaseArray>
                baseArray;              /* Offset to BaseArray table--from
                                         * beginning of MarkBasePos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkBasePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  MarkBasePosFormat1    format1;
  } u;
};


typedef AnchorMatrix LigatureAttach;    /* component-major--
                                         * in order of writing direction--,
                                         * mark-minor--
                                         * ordered by class--zero-based. */

typedef OffsetListOf<LigatureAttach> LigatureArray;
                                        /* Array of LigatureAttach
                                         * tables ordered by
                                         * LigatureCoverage Index */

struct MarkLigPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+ligatureCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int j = skippy_iter.idx;
    unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
    if (lig_index == NOT_COVERED) return TRACE_RETURN (false);

    const LigatureArray& lig_array = this+ligatureArray;
    const LigatureAttach& lig_attach = lig_array[lig_index];

    /* Find component to attach to */
    unsigned int comp_count = lig_attach.rows;
    if (unlikely (!comp_count)) return TRACE_RETURN (false);

    /* We must now check whether the ligature ID of the current mark glyph
     * is identical to the ligature ID of the found ligature.  If yes, we
     * can directly use the component index.  If not, we attach the mark
     * glyph to the last component of the ligature. */
    unsigned int comp_index;
    unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
    if (lig_id && lig_id == mark_id && mark_comp > 0)
      comp_index = MIN (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
    else
      comp_index = comp_count - 1;

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                markCoverage;           /* Offset to Mark Coverage table--from
                                         * beginning of MarkLigPos subtable */
  OffsetTo<Coverage>
                ligatureCoverage;       /* Offset to Ligature Coverage
                                         * table--from beginning of MarkLigPos
                                         * subtable */
  USHORT        classCount;             /* Number of defined mark classes */
  OffsetTo<MarkArray>
                markArray;              /* Offset to MarkArray table--from
                                         * beginning of MarkLigPos subtable */
  OffsetTo<LigatureArray>
                ligatureArray;          /* Offset to LigatureArray table--from
                                         * beginning of MarkLigPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkLigPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  MarkLigPosFormat1     format1;
  } u;
};


typedef AnchorMatrix Mark2Array;        /* mark2-major--
                                         * in order of Mark2Coverage Index--,
                                         * mark1-minor--
                                         * ordered by class--zero-based. */

struct MarkMarkPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+mark1Coverage).add_coverage (c->input);
    (this+mark2Coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+mark1Coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a suitable mark glyph until a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return TRACE_RETURN (false); }

    unsigned int j = skippy_iter.idx;

    unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
    unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);

    if (likely (id1 == id2)) {
      if (id1 == 0) /* Marks belonging to the same base. */
        goto good;
      else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
        goto good;
    } else {
      /* If ligature ids don't match, it may be the case that one of the marks
       * itself is a ligature.  In which case match. */
      if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
        goto good;
    }

    /* Didn't match. */
    return TRACE_RETURN (false);

    good:
    unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
    if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
                         mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
                      && mark2Array.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT        format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                mark1Coverage;          /* Offset to Combining Mark1 Coverage
                                         * table--from beginning of MarkMarkPos
                                         * subtable */
  OffsetTo<Coverage>
                mark2Coverage;          /* Offset to Combining Mark2 Coverage
                                         * table--from beginning of MarkMarkPos
                                         * subtable */
  USHORT        classCount;             /* Number of defined mark classes */
  OffsetTo<MarkArray>
                mark1Array;             /* Offset to Mark1Array table--from
                                         * beginning of MarkMarkPos subtable */
  OffsetTo<Mark2Array>
                mark2Array;             /* Offset to Mark2Array table--from
                                         * beginning of MarkMarkPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkMarkPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT                format;         /* Format identifier */
  MarkMarkPosFormat1    format1;
  } u;
};


struct ContextPos : Context {};

struct ChainContextPos : ChainContext {};

struct ExtensionPos : Extension<ExtensionPos>
{
  typedef struct PosLookupSubTable LookupSubTable;
};


struct PosLookupSubTable
{
  friend struct PosLookup;

  enum Type {
    Single              = 1,
    Pair                = 2,
    Cursive             = 3,
    MarkBase            = 4,
    MarkLig             = 5,
    MarkMark            = 6,
    Context             = 7,
    ChainContext        = 8,
    Extension           = 9
  };

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
  {
    TRACE_DISPATCH (this, lookup_type);
    switch (lookup_type) {
    case Single:              return TRACE_RETURN (u.single.dispatch (c));
    case Pair:                return TRACE_RETURN (u.pair.dispatch (c));
    case Cursive:             return TRACE_RETURN (u.cursive.dispatch (c));
    case MarkBase:            return TRACE_RETURN (u.markBase.dispatch (c));
    case MarkLig:             return TRACE_RETURN (u.markLig.dispatch (c));
    case MarkMark:            return TRACE_RETURN (u.markMark.dispatch (c));
    case Context:             return TRACE_RETURN (u.context.dispatch (c));
    case ChainContext:        return TRACE_RETURN (u.chainContext.dispatch (c));
    case Extension:           return TRACE_RETURN (u.extension.dispatch (c));
    default:                  return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:              return TRACE_RETURN (u.single.sanitize (c));
    case Pair:                return TRACE_RETURN (u.pair.sanitize (c));
    case Cursive:             return TRACE_RETURN (u.cursive.sanitize (c));
    case MarkBase:            return TRACE_RETURN (u.markBase.sanitize (c));
    case MarkLig:             return TRACE_RETURN (u.markLig.sanitize (c));
    case MarkMark:            return TRACE_RETURN (u.markMark.sanitize (c));
    case Context:             return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext:        return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:           return TRACE_RETURN (u.extension.sanitize (c));
    default:                  return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  struct {
    USHORT              sub_format;
  } header;
  SinglePos             single;
  PairPos               pair;
  CursivePos            cursive;
  MarkBasePos           markBase;
  MarkLigPos            markLig;
  MarkMarkPos           markMark;
  ContextPos            context;
  ChainContextPos       chainContext;
  ExtensionPos          extension;
  } u;
  public:
  DEFINE_SIZE_UNION (2, header.sub_format);
};


struct PosLookup : Lookup
{
  inline const PosLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; }

  inline bool is_reverse (void) const
  {
    return false;
  }

  inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (NULL);
    return TRACE_RETURN (dispatch (c));
  }

  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ());
      if (coverage != last) {
        coverage->add_coverage (glyphs);
        last = coverage;
      }
    }
  }

  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props))
      return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  template <typename context_t>
  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    unsigned int lookup_type = get_type ();
    TRACE_DISPATCH (this, lookup_type);
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
        return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<PosLookupSubTable> &list = CastR<OffsetArrayOf<PosLookupSubTable> > (subTable);
    return TRACE_RETURN (list.sanitize (c, this, get_type ()));
  }
};

typedef OffsetListOf<PosLookup> PosLookupList;

/*
 * GPOS -- The Glyph Positioning Table
 */

struct GPOS : GSUBGPOS
{
  static const hb_tag_t tableTag        = HB_OT_TAG_GPOS;

  inline const PosLookup& get_lookup (unsigned int i) const
  { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }

  static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }
  public:
  DEFINE_SIZE_STATIC (10);
};


static void
fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  unsigned int j = pos[i].cursive_chain();
  if (likely (!j))
    return;

  j += i;

  pos[i].cursive_chain() = 0;

  fix_cursive_minor_offset (pos, j, direction);

  if (HB_DIRECTION_IS_HORIZONTAL (direction))
    pos[i].y_offset += pos[j].y_offset;
  else
    pos[i].x_offset += pos[j].x_offset;
}

static void
fix_mark_attachment (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  if (likely (!(pos[i].attach_lookback())))
    return;

  unsigned int j = i - pos[i].attach_lookback();

  pos[i].x_offset += pos[j].x_offset;
  pos[i].y_offset += pos[j].y_offset;

  if (HB_DIRECTION_IS_FORWARD (direction))
    for (unsigned int k = j; k < i; k++) {
      pos[i].x_offset -= pos[k].x_advance;
      pos[i].y_offset -= pos[k].y_advance;
    }
  else
    for (unsigned int k = j + 1; k < i + 1; k++) {
      pos[i].x_offset += pos[k].x_advance;
      pos[i].y_offset += pos[k].y_advance;
    }
}
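/* Note: fix_mark_attachment() moves a mark to the origin of the glyph it attached to
 * (attach_lookback() positions earlier): in a forward-rendered run the advances of the
 * glyphs between the base (at j) and the mark (at i) are subtracted from the mark's
 * offset, in a backward-rendered run they are added. */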

void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  buffer->clear_positions ();

  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
    buffer->pos[i].attach_lookback() = buffer->pos[i].cursive_chain() = 0;
}

void
GPOS::position_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  _hb_buffer_assert_gsubgpos_vars (buffer);

  unsigned int len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  hb_direction_t direction = buffer->props.direction;

  /* Handle cursive connections */
  for (unsigned int i = 0; i < len; i++)
    fix_cursive_minor_offset (pos, i, direction);

  /* Handle attachments */
  for (unsigned int i = 0; i < len; i++)
    fix_mark_attachment (pos, i, direction);
}


/* Out-of-class implementation for methods recursing */

template <typename context_t>
/*static*/ inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  return l.dispatch (c);
}

/*static*/ inline bool PosLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  c->set_lookup (l);
  bool ret = l.apply_once (c);
  c->lookup_props = saved_lookup_props;
  return ret;
}


#undef attach_lookback
#undef cursive_chain


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */