/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012,2013  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */
#ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
#define HB_OT_LAYOUT_GPOS_TABLE_HH

#include "hb-ot-layout-gsubgpos-private.hh"


namespace OT {

/* buffer **position** var allocations */
#define attach_lookback() var.u16[0] /* number of glyphs to go back to attach this glyph to its base */
#define cursive_chain() var.i16[1] /* character to which this connects, may be positive or negative */
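
/* Illustrative note: both vars are scratch, per-glyph state used only within
 * this file.  Mark attachment records how far back the attached-to glyph sits
 * via attach_lookback(); cursive attachment records a signed relative index
 * via cursive_chain().  Both are consumed by GPOS::position_finish() below. */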


/* Shared Tables: ValueRecord, Anchor Table, and MarkArray */

typedef USHORT Value;

typedef Value ValueRecord[VAR];

struct ValueFormat : USHORT
{
  enum Flags {
    xPlacement  = 0x0001u, /* Includes horizontal adjustment for placement */
    yPlacement  = 0x0002u, /* Includes vertical adjustment for placement */
    xAdvance    = 0x0004u, /* Includes horizontal adjustment for advance */
    yAdvance    = 0x0008u, /* Includes vertical adjustment for advance */
    xPlaDevice  = 0x0010u, /* Includes horizontal Device table for placement */
    yPlaDevice  = 0x0020u, /* Includes vertical Device table for placement */
    xAdvDevice  = 0x0040u, /* Includes horizontal Device table for advance */
    yAdvDevice  = 0x0080u, /* Includes vertical Device table for advance */
    ignored     = 0x0F00u, /* Was used in TrueType Open for MM fonts */
    reserved    = 0xF000u, /* For future use */

    devices     = 0x00F0u  /* Mask for having any Device table */
  };

/* All fields are optional.  Only those available advance the value pointer. */
#if 0
  SHORT   xPlacement;  /* Horizontal adjustment for
                        * placement--in design units */
  SHORT   yPlacement;  /* Vertical adjustment for
                        * placement--in design units */
  SHORT   xAdvance;    /* Horizontal adjustment for
                        * advance--in design units (only used
                        * for horizontal writing) */
  SHORT   yAdvance;    /* Vertical adjustment for advance--in
                        * design units (only used for vertical
                        * writing) */
  Offset  xPlaDevice;  /* Offset to Device table for
                        * horizontal placement--measured from
                        * beginning of PosTable (may be NULL) */
  Offset  yPlaDevice;  /* Offset to Device table for vertical
                        * placement--measured from beginning
                        * of PosTable (may be NULL) */
  Offset  xAdvDevice;  /* Offset to Device table for
                        * horizontal advance--measured from
                        * beginning of PosTable (may be NULL) */
  Offset  yAdvDevice;  /* Offset to Device table for vertical
                        * advance--measured from beginning of
                        * PosTable (may be NULL) */
#endif

  inline unsigned int get_len (void) const
  { return _hb_popcount32 ((unsigned int) *this); }
  inline unsigned int get_size (void) const
  { return get_len () * Value::static_size; }
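
  /* Worked example (illustrative): a ValueFormat of 0x0005 sets xPlacement and
   * xAdvance, so _hb_popcount32() makes get_len() == 2 and each ValueRecord is
   * two SHORTs; apply_value() below consumes exactly those two values and skips
   * the fields whose bits are not set. */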

  void apply_value (hb_font_t            *font,
                    hb_direction_t        direction,
                    const void           *base,
                    const Value          *values,
                    hb_glyph_position_t  &glyph_pos) const
  {
    unsigned int x_ppem, y_ppem;
    unsigned int format = *this;
    hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (direction);

    if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++));
    if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++));
    if (format & xAdvance) {
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values));
      values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values));
      values++;
    }

    if (!has_device ()) return;

    x_ppem = font->x_ppem;
    y_ppem = font->y_ppem;

    if (!x_ppem && !y_ppem) return;

    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (x_ppem) glyph_pos.x_offset += (base + get_device (values)).get_x_delta (font);
      values++;
    }
    if (format & yPlaDevice) {
      if (y_ppem) glyph_pos.y_offset += (base + get_device (values)).get_y_delta (font);
      values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && x_ppem) glyph_pos.x_advance += (base + get_device (values)).get_x_delta (font);
      values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && y_ppem) glyph_pos.y_advance -= (base + get_device (values)).get_y_delta (font);
      values++;
    }
  }

  inline bool sanitize_value_devices (hb_sanitize_context_t *c, void *base, Value *values) {
    unsigned int format = *this;

    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance)   values++;
    if (format & yAdvance)   values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }

  static inline OffsetTo<Device>& get_device (Value* value)
  { return *CastP<OffsetTo<Device> > (value); }
  static inline const OffsetTo<Device>& get_device (const Value* value)
  { return *CastP<OffsetTo<Device> > (value); }

  static inline const SHORT& get_short (const Value* value)
  { return *CastP<SHORT> (value); }

  inline bool has_device (void) const {
    unsigned int format = *this;
    return (format & devices) != 0;
  }

  inline bool sanitize_value (hb_sanitize_context_t *c, void *base, Value *values) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }

  inline bool sanitize_values (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count) {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += len;
    }

    return TRACE_RETURN (true);
  }

  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  inline bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count, unsigned int stride) {
    TRACE_SANITIZE (this);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += stride;
    }

    return TRACE_RETURN (true);
  }
};


struct AnchorFormat1
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT  format;       /* Format identifier--format = 1 */
  SHORT   xCoordinate;  /* Horizontal value--in design units */
  SHORT   yCoordinate;  /* Vertical value--in design units */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct AnchorFormat2
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    unsigned int x_ppem = font->x_ppem;
    unsigned int y_ppem = font->y_ppem;
    hb_position_t cx, cy;
    hb_bool_t ret;

    ret = (x_ppem || y_ppem) &&
          font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
    *x = ret && x_ppem ? cx : font->em_scale_x (xCoordinate);
    *y = ret && y_ppem ? cy : font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  protected:
  USHORT  format;       /* Format identifier--format = 2 */
  SHORT   xCoordinate;  /* Horizontal value--in design units */
  SHORT   yCoordinate;  /* Vertical value--in design units */
  USHORT  anchorPoint;  /* Index to glyph contour point */
  public:
  DEFINE_SIZE_STATIC (8);
};

struct AnchorFormat3
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);

    if (font->x_ppem)
      *x += (this+xDeviceTable).get_x_delta (font);
    if (font->y_ppem)
      *y += (this+yDeviceTable).get_y_delta (font);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
  }

  protected:
  USHORT  format;       /* Format identifier--format = 3 */
  SHORT   xCoordinate;  /* Horizontal value--in design units */
  SHORT   yCoordinate;  /* Vertical value--in design units */
  OffsetTo<Device>
          xDeviceTable; /* Offset to Device table for X
                         * coordinate--from beginning of
                         * Anchor table (may be NULL) */
  OffsetTo<Device>
          yDeviceTable; /* Offset to Device table for Y
                         * coordinate--from beginning of
                         * Anchor table (may be NULL) */
  public:
  DEFINE_SIZE_STATIC (10);
};

struct Anchor
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (font, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (font, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (font, glyph_id, x, y); return;
    default:                                             return;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT         format;  /* Format identifier */
  AnchorFormat1  format1;
  AnchorFormat2  format2;
  AnchorFormat3  format3;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};

struct AnchorMatrix
{
  inline const Anchor& get_anchor (unsigned int row, unsigned int col, unsigned int cols, bool *found) const {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
    *found = !matrix[row * cols + col].is_null ();
    return this+matrix[row * cols + col];
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrix, matrix[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrix[i].sanitize (c, this)) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  USHORT  rows;            /* Number of rows */
  protected:
  OffsetTo<Anchor>
          matrix[VAR];     /* Matrix of offsets to Anchor tables--
                            * from beginning of AnchorMatrix table */
  public:
  DEFINE_SIZE_ARRAY (2, matrix);
};
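
/* Illustrative note: AnchorMatrix rows are indexed by the attachment target
 * (e.g. a base glyph's Coverage index) and columns by mark class, so with
 * cols == classCount the anchor for target b and class k is matrix[b * cols + k].
 * A NULL offset there means "no anchor"; get_anchor() reports that via *found. */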


struct MarkRecord
{
  friend struct MarkArray;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  protected:
  USHORT  klass;       /* Class defined for this mark */
  OffsetTo<Anchor>
          markAnchor;  /* Offset to Anchor table--from
                        * beginning of MarkArray table */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage order */
{
  inline bool apply (hb_apply_context_t *c,
                     unsigned int mark_index, unsigned int glyph_index,
                     const AnchorMatrix &anchors, unsigned int class_count,
                     unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it. */
    if (unlikely (!found)) return TRACE_RETURN (false);

    hb_position_t mark_x, mark_y, base_x, base_y;

    mark_anchor.get_anchor (c->font, buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c->font, buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    hb_glyph_position_t &o = buffer->cur_pos();
    o.x_offset = base_x - mark_x;
    o.y_offset = base_y - mark_y;
    o.attach_lookback() = buffer->idx - glyph_pos;

    buffer->idx++;
    return TRACE_RETURN (true);
  }
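
  /* Illustrative note: the mark is moved so its anchor coincides with the base
   * anchor, i.e. offset = base_anchor - mark_anchor per axis.  The offsets are
   * still relative to the mark's own origin here; the advances of the glyphs in
   * between are compensated later in fix_mark_attachment(). */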

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
  }
};


struct SinglePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             values, buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values));
  }

  protected:
  USHORT       format;       /* Format identifier--format = 1 */
  OffsetTo<Coverage>
               coverage;     /* Offset to Coverage table--from
                              * beginning of subtable */
  ValueFormat  valueFormat;  /* Defines the types of data in the
                              * ValueRecord */
  ValueRecord  values;       /* Defines positioning
                              * value(s)--applied to all glyphs in
                              * the Coverage table */
  public:
  DEFINE_SIZE_ARRAY (6, values);
};

struct SinglePosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (unlikely (index >= valueCount)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             &values[index * valueFormat.get_len ()],
                             buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount));
  }

  protected:
  USHORT       format;       /* Format identifier--format = 2 */
  OffsetTo<Coverage>
               coverage;     /* Offset to Coverage table--from
                              * beginning of subtable */
  ValueFormat  valueFormat;  /* Defines the types of data in the
                              * ValueRecord */
  USHORT       valueCount;   /* Number of ValueRecords */
  ValueRecord  values;       /* Array of ValueRecords--positioning
                              * values applied to glyphs */
  public:
  DEFINE_SIZE_ARRAY (8, values);
};

struct SinglePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT            format;  /* Format identifier */
  SinglePosFormat1  format1;
  SinglePosFormat2  format2;
  } u;
};


struct PairValueRecord
{
  friend struct PairSet;

  protected:
  GlyphID      secondGlyph;  /* GlyphID of second glyph in the
                              * pair--first glyph is listed in the
                              * Coverage table */
  ValueRecord  values;       /* Positioning data for the first glyph
                              * followed by positioning data for the
                              * second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, values);
};

struct PairSet
{
  friend struct PairPosFormat1;

  inline void collect_glyphs (hb_collect_glyphs_context_t *c,
                              const ValueFormat *valueFormats) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      c->input->add (record->secondGlyph);
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }
  }

  inline bool apply (hb_apply_context_t *c,
                     const ValueFormat *valueFormats,
                     unsigned int pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (buffer->info[pos].codepoint == record->secondGlyph)
      {
        valueFormats[0].apply_value (c->font, c->direction, this,
                                     &record->values[0], buffer->cur_pos());
        valueFormats[1].apply_value (c->font, c->direction, this,
                                     &record->values[len1], buffer->pos[pos]);
        if (len2)
          pos++;
        buffer->idx = pos;
        return TRACE_RETURN (true);
      }
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }

    return TRACE_RETURN (false);
  }
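
  /* Illustrative note: each PairValueRecord is one USHORT (secondGlyph) followed
   * by len1 values for the first glyph and len2 for the second, hence
   * record_size = USHORT::static_size * (1 + len1 + len2); e.g. with
   * valueFormat1 == xAdvance and valueFormat2 == 0 a record is two USHORTs. */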

  struct sanitize_closure_t {
    void *base;
    ValueFormat *valueFormats;
    unsigned int len1;   /* valueFormats[0].get_len() */
    unsigned int stride; /* 1 + len1 + len2 */
  };

  inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && c->check_array (array, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);

    unsigned int count = len;
    PairValueRecord *record = CastP<PairValueRecord> (array);
    return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
                      && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
  }

  protected:
  USHORT  len;         /* Number of PairValueRecords */
  USHORT  array[VAR];  /* Array of PairValueRecords--ordered
                        * by GlyphID of the second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, array);
};

struct PairPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = pairSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+pairSet[i]).collect_glyphs (c, &valueFormat1);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    PairSet::sanitize_closure_t closure = {
      this,
      &valueFormat1,
      len1,
      1 + len1 + len2
    };

    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
  }

  protected:
  USHORT       format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
               coverage;      /* Offset to Coverage table--from
                               * beginning of subtable */
  ValueFormat  valueFormat1;  /* Defines the types of data in
                               * ValueRecord1--for the first glyph
                               * in the pair--may be zero (0) */
  ValueFormat  valueFormat2;  /* Defines the types of data in
                               * ValueRecord2--for the second glyph
                               * in the pair--may be zero (0) */
  OffsetArrayOf<PairSet>
               pairSet;       /* Array of PairSet tables
                               * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (10, pairSet);
};

struct PairPosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    /* (this+coverage).add_coverage (c->input); // Don't need this. */

    unsigned int count1 = class1Count;
    const ClassDef &klass1 = this+classDef1;
    for (unsigned int i = 0; i < count1; i++)
      klass1.add_class (c->input, i);

    unsigned int count2 = class2Count;
    const ClassDef &klass2 = this+classDef2;
    for (unsigned int i = 0; i < count2; i++)
      klass2.add_class (c->input, i);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int record_len = len1 + len2;

    unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
    unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
    if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);

    const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
    valueFormat1.apply_value (c->font, c->direction, this,
                              v, buffer->cur_pos());
    valueFormat2.apply_value (c->font, c->direction, this,
                              v + len1, buffer->pos[skippy_iter.idx]);

    buffer->idx = skippy_iter.idx;
    if (len2)
      buffer->idx++;

    return TRACE_RETURN (true);
  }
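
  /* Illustrative note: the class matrix is class1-major, so the record for the
   * pair (klass1, klass2) starts at values[(klass1 * class2Count + klass2) *
   * (len1 + len2)]; the first len1 entries apply to the current glyph, the next
   * len2 entries to the second glyph of the pair. */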

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && coverage.sanitize (c, this)
       && classDef1.sanitize (c, this)
       && classDef2.sanitize (c, this))) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int stride = len1 + len2;
    unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
    unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
    return TRACE_RETURN (c->check_array (values, record_size, count) &&
                         valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
                         valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
  }

  protected:
  USHORT       format;        /* Format identifier--format = 2 */
  OffsetTo<Coverage>
               coverage;      /* Offset to Coverage table--from
                               * beginning of subtable */
  ValueFormat  valueFormat1;  /* ValueRecord definition--for the
                               * first glyph of the pair--may be zero
                               * (0) */
  ValueFormat  valueFormat2;  /* ValueRecord definition--for the
                               * second glyph of the pair--may be
                               * zero (0) */
  OffsetTo<ClassDef>
               classDef1;     /* Offset to ClassDef table--from
                               * beginning of PairPos subtable--for
                               * the first glyph of the pair */
  OffsetTo<ClassDef>
               classDef2;     /* Offset to ClassDef table--from
                               * beginning of PairPos subtable--for
                               * the second glyph of the pair */
  USHORT       class1Count;   /* Number of classes in ClassDef1
                               * table--includes Class0 */
  USHORT       class2Count;   /* Number of classes in ClassDef2
                               * table--includes Class0 */
  ValueRecord  values;        /* Matrix of value pairs:
                               * class1-major, class2-minor,
                               * Each entry has value1 and value2 */
  public:
  DEFINE_SIZE_ARRAY (16, values);
};

struct PairPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT          format;  /* Format identifier */
  PairPosFormat1  format1;
  PairPosFormat2  format2;
  } u;
};


struct EntryExitRecord
{
  friend struct CursivePosFormat1;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
  }

  protected:
  OffsetTo<Anchor>
          entryAnchor;  /* Offset to EntryAnchor table--from
                         * beginning of CursivePos
                         * subtable--may be NULL */
  OffsetTo<Anchor>
          exitAnchor;   /* Offset to ExitAnchor table--from
                         * beginning of CursivePos
                         * subtable--may be NULL */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct CursivePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;

    /* We don't handle mark glyphs here. */
    if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return TRACE_RETURN (false);

    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
    if (!this_record.exitAnchor) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
    if (!next_record.entryAnchor) return TRACE_RETURN (false);

    unsigned int i = buffer->idx;
    unsigned int j = skippy_iter.idx;

    hb_position_t entry_x, entry_y, exit_x, exit_y;
    (this+this_record.exitAnchor).get_anchor (c->font, buffer->info[i].codepoint, &exit_x, &exit_y);
    (this+next_record.entryAnchor).get_anchor (c->font, buffer->info[j].codepoint, &entry_x, &entry_y);

    hb_glyph_position_t *pos = buffer->pos;

    hb_position_t d;
    /* Main-direction adjustment */
    switch (c->direction) {
      case HB_DIRECTION_LTR:
        pos[i].x_advance  = exit_x + pos[i].x_offset;

        d = entry_x + pos[j].x_offset;
        pos[j].x_advance -= d;
        pos[j].x_offset  -= d;
        break;
      case HB_DIRECTION_RTL:
        d = exit_x + pos[i].x_offset;
        pos[i].x_advance -= d;
        pos[i].x_offset  -= d;

        pos[j].x_advance  = entry_x + pos[j].x_offset;
        break;
      case HB_DIRECTION_TTB:
        pos[i].y_advance  = exit_y + pos[i].y_offset;

        d = entry_y + pos[j].y_offset;
        pos[j].y_advance -= d;
        pos[j].y_offset  -= d;
        break;
      case HB_DIRECTION_BTT:
        d = exit_y + pos[i].y_offset;
        pos[i].y_advance -= d;
        pos[i].y_offset  -= d;

        pos[j].y_advance  = entry_y;
        break;
      case HB_DIRECTION_INVALID:
      default:
        break;
    }
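
    /* Illustrative note for the LTR case above: glyph i's advance is trimmed so
     * the pen stops at its exit anchor, and glyph j is shifted left by
     * d = entry_x + x_offset so its entry anchor lands on that same point; the
     * other directions are the mirrored or vertical analogues. */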

    /* Cross-direction adjustment */
    if (c->lookup_props & LookupFlag::RightToLeft) {
      pos[i].cursive_chain() = j - i;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[i].y_offset = entry_y - exit_y;
      else
        pos[i].x_offset = entry_x - exit_x;
    } else {
      pos[j].cursive_chain() = i - j;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[j].y_offset = exit_y - entry_y;
      else
        pos[j].x_offset = exit_x - entry_x;
    }

    buffer->idx = j;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
  }

  protected:
  USHORT  format;           /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          coverage;         /* Offset to Coverage table--from
                             * beginning of subtable */
  ArrayOf<EntryExitRecord>
          entryExitRecord;  /* Array of EntryExit records--in
                             * Coverage Index order */
  public:
  DEFINE_SIZE_ARRAY (6, entryExitRecord);
};
>
990 inline typename
context_t::return_t
dispatch (context_t
*c
) const
992 TRACE_DISPATCH (this);
994 case 1: return TRACE_RETURN (c
->dispatch (u
.format1
));
995 default:return TRACE_RETURN (c
->default_return_value ());
999 inline bool sanitize (hb_sanitize_context_t
*c
) {
1000 TRACE_SANITIZE (this);
1001 if (!u
.format
.sanitize (c
)) return TRACE_RETURN (false);
1003 case 1: return TRACE_RETURN (u
.format1
.sanitize (c
));
1004 default:return TRACE_RETURN (true);
1010 USHORT format
; /* Format identifier */
1011 CursivePosFormat1 format1
;


typedef AnchorMatrix BaseArray;  /* base-major--
                                  * in order of BaseCoverage Index--,
                                  * mark-minor--
                                  * ordered by class--zero-based. */

struct MarkBasePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+baseCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    do {
      if (!skippy_iter.prev ()) return TRACE_RETURN (false);
      /* We only want to attach to the first of a MultipleSubst sequence.  Reject others. */
      if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break;
      skippy_iter.reject ();
    } while (1);

    /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
    if (base_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT  format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          markCoverage;  /* Offset to MarkCoverage table--from
                          * beginning of MarkBasePos subtable */
  OffsetTo<Coverage>
          baseCoverage;  /* Offset to BaseCoverage table--from
                          * beginning of MarkBasePos subtable */
  USHORT  classCount;    /* Number of classes defined for marks */
  OffsetTo<MarkArray>
          markArray;     /* Offset to MarkArray table--from
                          * beginning of MarkBasePos subtable */
  OffsetTo<BaseArray>
          baseArray;     /* Offset to BaseArray table--from
                          * beginning of MarkBasePos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkBasePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT              format;  /* Format identifier */
  MarkBasePosFormat1  format1;
  } u;
};


typedef AnchorMatrix LigatureAttach;  /* component-major--
                                       * in order of writing direction--,
                                       * mark-minor--
                                       * ordered by class--zero-based. */

typedef OffsetListOf<LigatureAttach> LigatureArray;
                                      /* Array of LigatureAttach
                                       * tables ordered by
                                       * LigatureCoverage Index */

struct MarkLigPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+ligatureCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int j = skippy_iter.idx;
    unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
    if (lig_index == NOT_COVERED) return TRACE_RETURN (false);

    const LigatureArray& lig_array = this+ligatureArray;
    const LigatureAttach& lig_attach = lig_array[lig_index];

    /* Find component to attach to */
    unsigned int comp_count = lig_attach.rows;
    if (unlikely (!comp_count)) return TRACE_RETURN (false);

    /* We must now check whether the ligature ID of the current mark glyph
     * is identical to the ligature ID of the found ligature.  If yes, we
     * can directly use the component index.  If not, we attach the mark
     * glyph to the last component of the ligature. */
    unsigned int comp_index;
    unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
    if (lig_id && lig_id == mark_id && mark_comp > 0)
      comp_index = MIN (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
    else
      comp_index = comp_count - 1;

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT  format;            /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          markCoverage;      /* Offset to Mark Coverage table--from
                              * beginning of MarkLigPos subtable */
  OffsetTo<Coverage>
          ligatureCoverage;  /* Offset to Ligature Coverage
                              * table--from beginning of MarkLigPos
                              * subtable */
  USHORT  classCount;        /* Number of defined mark classes */
  OffsetTo<MarkArray>
          markArray;         /* Offset to MarkArray table--from
                              * beginning of MarkLigPos subtable */
  OffsetTo<LigatureArray>
          ligatureArray;     /* Offset to LigatureArray table--from
                              * beginning of MarkLigPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkLigPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT             format;  /* Format identifier */
  MarkLigPosFormat1  format1;
  } u;
};


typedef AnchorMatrix Mark2Array;  /* mark2-major--
                                   * in order of Mark2Coverage Index--,
                                   * mark1-minor--
                                   * ordered by class--zero-based. */

struct MarkMarkPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+mark1Coverage).add_coverage (c->input);
    (this+mark2Coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+mark1Coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a suitable mark glyph until a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return TRACE_RETURN (false); }

    unsigned int j = skippy_iter.idx;

    unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
    unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);

    if (likely (id1 == id2)) {
      if (id1 == 0) /* Marks belonging to the same base. */
        goto good;
      else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
        goto good;
    } else {
      /* If ligature ids don't match, it may be the case that one of the marks
       * itself is a ligature.  In which case match. */
      if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
        goto good;
    }

    /* Didn't match. */
    return TRACE_RETURN (false);

    good:
    unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
    if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
                         mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
                         && mark2Array.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  USHORT  format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          mark1Coverage;  /* Offset to Combining Mark1 Coverage
                           * table--from beginning of MarkMarkPos
                           * subtable */
  OffsetTo<Coverage>
          mark2Coverage;  /* Offset to Combining Mark2 Coverage
                           * table--from beginning of MarkMarkPos
                           * subtable */
  USHORT  classCount;     /* Number of defined mark classes */
  OffsetTo<MarkArray>
          mark1Array;     /* Offset to Mark1Array table--from
                           * beginning of MarkMarkPos subtable */
  OffsetTo<Mark2Array>
          mark2Array;     /* Offset to Mark2Array table--from
                           * beginning of MarkMarkPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};

struct MarkMarkPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  USHORT              format;  /* Format identifier */
  MarkMarkPosFormat1  format1;
  } u;
};


struct ContextPos : Context {};

struct ChainContextPos : ChainContext {};

struct ExtensionPos : Extension<ExtensionPos>
{
  typedef struct PosLookupSubTable LookupSubTable;
};


/*
 * PosLookup
 */


struct PosLookupSubTable
{
  friend struct PosLookup;

  enum Type {
    Single        = 1,
    Pair          = 2,
    Cursive       = 3,
    MarkBase      = 4,
    MarkLig       = 5,
    MarkMark      = 6,
    Context       = 7,
    ChainContext  = 8,
    Extension     = 9
  };

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
  {
    TRACE_DISPATCH (this);
    switch (lookup_type) {
    case Single:        return TRACE_RETURN (u.single.dispatch (c));
    case Pair:          return TRACE_RETURN (u.pair.dispatch (c));
    case Cursive:       return TRACE_RETURN (u.cursive.dispatch (c));
    case MarkBase:      return TRACE_RETURN (u.markBase.dispatch (c));
    case MarkLig:       return TRACE_RETURN (u.markLig.dispatch (c));
    case MarkMark:      return TRACE_RETURN (u.markMark.dispatch (c));
    case Context:       return TRACE_RETURN (u.context.dispatch (c));
    case ChainContext:  return TRACE_RETURN (u.chainContext.dispatch (c));
    case Extension:     return TRACE_RETURN (u.extension.dispatch (c));
    default:            return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:        return TRACE_RETURN (u.single.sanitize (c));
    case Pair:          return TRACE_RETURN (u.pair.sanitize (c));
    case Cursive:       return TRACE_RETURN (u.cursive.sanitize (c));
    case MarkBase:      return TRACE_RETURN (u.markBase.sanitize (c));
    case MarkLig:       return TRACE_RETURN (u.markLig.sanitize (c));
    case MarkMark:      return TRACE_RETURN (u.markMark.sanitize (c));
    case Context:       return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext:  return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:     return TRACE_RETURN (u.extension.sanitize (c));
    default:            return TRACE_RETURN (true);
    }
  }

  protected:
  union {
  struct {
    USHORT          sub_format;
  } header;
  SinglePos         single;
  PairPos           pair;
  CursivePos        cursive;
  MarkBasePos       markBase;
  MarkLigPos        markLig;
  MarkMarkPos       markMark;
  ContextPos        context;
  ChainContextPos   chainContext;
  ExtensionPos      extension;
  } u;
  public:
  DEFINE_SIZE_UNION (2, header.sub_format);
};


struct PosLookup : Lookup
{
  inline const PosLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; }

  inline bool is_reverse (void) const
  {
    return false;
  }

  inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (NULL);
    return TRACE_RETURN (dispatch (c));
  }

  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ());
      if (coverage != last) {
        coverage->add_coverage (glyphs);
        last = coverage;
      }
    }
  }

  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props))
      return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  template <typename context_t>
  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    unsigned int lookup_type = get_type ();
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
        return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<PosLookupSubTable> &list = CastR<OffsetArrayOf<PosLookupSubTable> > (subTable);
    return TRACE_RETURN (list.sanitize (c, this, get_type ()));
  }
};

typedef OffsetListOf<PosLookup> PosLookupList;

/*
 * GPOS -- The Glyph Positioning Table
 */

struct GPOS : GSUBGPOS
{
  static const hb_tag_t tableTag = HB_OT_TAG_GPOS;

  inline const PosLookup& get_lookup (unsigned int i) const
  { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }

  static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }
  public:
  DEFINE_SIZE_STATIC (10);
};


static void
fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  unsigned int j = pos[i].cursive_chain();
  if (likely (!j))
    return;

  j += i;

  pos[i].cursive_chain() = 0;

  fix_cursive_minor_offset (pos, j, direction);

  if (HB_DIRECTION_IS_HORIZONTAL (direction))
    pos[i].y_offset += pos[j].y_offset;
  else
    pos[i].x_offset += pos[j].x_offset;
}
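
/* Illustrative note: cursive_chain() holds the relative index of the glyph this
 * one is chained to, so the recursion above first walks to the end of the chain
 * and then accumulates the minor-direction offsets back down; clearing the var
 * before recursing also protects against cycles. */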

static void
fix_mark_attachment (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  if (likely (!(pos[i].attach_lookback())))
    return;

  unsigned int j = i - pos[i].attach_lookback();

  pos[i].x_offset += pos[j].x_offset;
  pos[i].y_offset += pos[j].y_offset;

  if (HB_DIRECTION_IS_FORWARD (direction))
    for (unsigned int k = j; k < i; k++) {
      pos[i].x_offset -= pos[k].x_advance;
      pos[i].y_offset -= pos[k].y_advance;
    }
  else
    for (unsigned int k = j + 1; k < i + 1; k++) {
      pos[i].x_offset += pos[k].x_advance;
      pos[i].y_offset += pos[k].y_advance;
    }
}
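
/* Illustrative note: a mark's offsets were computed relative to its own origin,
 * but the pen has already advanced past the base and any glyphs in between, so
 * for forward directions the advances of glyphs [j, i) are subtracted (and added
 * for backward directions) to express the offset at the mark's drawing position. */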

void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  buffer->clear_positions ();

  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
    buffer->pos[i].attach_lookback() = buffer->pos[i].cursive_chain() = 0;
}

void
GPOS::position_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  _hb_buffer_assert_gsubgpos_vars (buffer);

  unsigned int len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  hb_direction_t direction = buffer->props.direction;

  /* Handle cursive connections */
  for (unsigned int i = 0; i < len; i++)
    fix_cursive_minor_offset (pos, i, direction);

  /* Handle attachments */
  for (unsigned int i = 0; i < len; i++)
    fix_mark_attachment (pos, i, direction);
}


/* Out-of-class implementation for methods recursing */

template <typename context_t>
/*static*/ inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  return l.dispatch (c);
}

/*static*/ inline bool PosLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  c->set_lookup (l);
  bool ret = l.apply_once (c);
  c->lookup_props = saved_lookup_props;
  return ret;
}


#undef attach_lookback
#undef cursive_chain


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */