/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012,2013  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */
#ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
#define HB_OT_LAYOUT_GPOS_TABLE_HH

#include "hb-ot-layout-gsubgpos-private.hh"


namespace OT {


/* buffer **position** var allocations */
#define attach_lookback() var.u16[0] /* number of glyphs to go back to attach this glyph to its base */
#define cursive_chain() var.i16[1] /* character to which this connects, may be positive or negative */


/* Shared Tables: ValueRecord, Anchor Table, and MarkArray */

typedef USHORT Value;

typedef Value ValueRecord[VAR];
struct ValueFormat : USHORT
{
  enum Flags {
    xPlacement  = 0x0001,  /* Includes horizontal adjustment for placement */
    yPlacement  = 0x0002,  /* Includes vertical adjustment for placement */
    xAdvance    = 0x0004,  /* Includes horizontal adjustment for advance */
    yAdvance    = 0x0008,  /* Includes vertical adjustment for advance */
    xPlaDevice  = 0x0010,  /* Includes horizontal Device table for placement */
    yPlaDevice  = 0x0020,  /* Includes vertical Device table for placement */
    xAdvDevice  = 0x0040,  /* Includes horizontal Device table for advance */
    yAdvDevice  = 0x0080,  /* Includes vertical Device table for advance */
    ignored     = 0x0F00,  /* Was used in TrueType Open for MM fonts */
    reserved    = 0xF000,  /* For future use */

    devices     = 0x00F0   /* Mask for having any Device table */
  };
/* All fields are optional.  Only those available advance the value pointer. */
#if 0
  SHORT    xPlacement;   /* Horizontal adjustment for
                          * placement--in design units */
  SHORT    yPlacement;   /* Vertical adjustment for
                          * placement--in design units */
  SHORT    xAdvance;     /* Horizontal adjustment for
                          * advance--in design units (only used
                          * for horizontal writing) */
  SHORT    yAdvance;     /* Vertical adjustment for advance--in
                          * design units (only used for vertical
                          * writing) */
  Offset   xPlaDevice;   /* Offset to Device table for
                          * horizontal placement--measured from
                          * beginning of PosTable (may be NULL) */
  Offset   yPlaDevice;   /* Offset to Device table for vertical
                          * placement--measured from beginning
                          * of PosTable (may be NULL) */
  Offset   xAdvDevice;   /* Offset to Device table for
                          * horizontal advance--measured from
                          * beginning of PosTable (may be NULL) */
  Offset   yAdvDevice;   /* Offset to Device table for vertical
                          * advance--measured from beginning of
                          * PosTable (may be NULL) */
#endif
  inline unsigned int get_len (void) const
  { return _hb_popcount32 ((unsigned int) *this); }
  inline unsigned int get_size (void) const
  { return get_len () * Value::static_size; }
  void apply_value (hb_font_t            *font,
                    hb_direction_t        direction,
                    const void           *base,
                    const Value          *values,
                    hb_glyph_position_t  &glyph_pos) const
  {
    unsigned int x_ppem, y_ppem;
    unsigned int format = *this;
    hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (direction);
    if (format & xPlacement) glyph_pos.x_offset  += font->em_scale_x (get_short (values++));
    if (format & yPlacement) glyph_pos.y_offset  += font->em_scale_y (get_short (values++));
    if (format & xAdvance) {
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values++)); else values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values++)); else values++;
    }
    if (!has_device ()) return;

    x_ppem = font->x_ppem;
    y_ppem = font->y_ppem;

    if (!x_ppem && !y_ppem) return;
    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (x_ppem) glyph_pos.x_offset  += (base + get_device (values++)).get_x_delta (font); else values++;
    }
    if (format & yPlaDevice) {
      if (y_ppem) glyph_pos.y_offset  += (base + get_device (values++)).get_y_delta (font); else values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && x_ppem) glyph_pos.x_advance += (base + get_device (values++)).get_x_delta (font); else values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && y_ppem) glyph_pos.y_advance -= (base + get_device (values++)).get_y_delta (font); else values++;
    }
  }
  inline bool sanitize_value_devices (hb_sanitize_context_t *c, void *base, Value *values) {
    unsigned int format = *this;

    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance)   values++;
    if (format & yAdvance)   values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }
  static inline OffsetTo<Device>& get_device (Value* value)
  { return *CastP<OffsetTo<Device> > (value); }
  static inline const OffsetTo<Device>& get_device (const Value* value)
  { return *CastP<OffsetTo<Device> > (value); }

  static inline const SHORT& get_short (const Value* value)
  { return *CastP<SHORT> (value); }
  inline bool has_device (void) const {
    unsigned int format = *this;
    return (format & devices) != 0;
  }
  inline bool sanitize_value (hb_sanitize_context_t *c, void *base, Value *values) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }
  inline bool sanitize_values (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count) {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += len;
    }

    return TRACE_RETURN (true);
  }
  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  inline bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, void *base, Value *values, unsigned int count, unsigned int stride) {
    TRACE_SANITIZE (this);

    if (!has_device ()) return TRACE_RETURN (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return TRACE_RETURN (false);
      values += stride;
    }

    return TRACE_RETURN (true);
  }
};
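

/* Illustrative sketch (not part of the original header): how a ValueFormat mask
 * maps onto the Values that follow it.  Assuming a hypothetical mask of
 * (xPlacement | xAdvance) == 0x0005, get_len () == popcount (0x0005) == 2, so a
 * ValueRecord holds two 16-bit Values per glyph: x placement first, then
 * x advance; apply_value () consumes one Value per set flag, in flag-bit order. */
#if 0
static inline void
value_format_example (void)
{
  unsigned int format = 0x0001 | 0x0004;            /* xPlacement | xAdvance (hypothetical) */
  unsigned int len    = _hb_popcount32 (format);    /* 2 Values per ValueRecord */
  unsigned int size   = len * Value::static_size;   /* 4 bytes per ValueRecord */
}
#endif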
struct AnchorFormat1
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  USHORT  format;       /* Format identifier--format = 1 */
  SHORT   xCoordinate;  /* Horizontal value--in design units */
  SHORT   yCoordinate;  /* Vertical value--in design units */
  DEFINE_SIZE_STATIC (6);
};
struct AnchorFormat2
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    unsigned int x_ppem = font->x_ppem;
    unsigned int y_ppem = font->y_ppem;
    hb_position_t cx, cy;
    hb_bool_t ret = false;

    if (x_ppem || y_ppem)
      ret = font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
    *x = x_ppem && ret ? cx : font->em_scale_x (xCoordinate);
    *y = y_ppem && ret ? cy : font->em_scale_y (yCoordinate);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this));
  }

  USHORT  format;       /* Format identifier--format = 2 */
  SHORT   xCoordinate;  /* Horizontal value--in design units */
  SHORT   yCoordinate;  /* Vertical value--in design units */
  USHORT  anchorPoint;  /* Index to glyph contour point */
  DEFINE_SIZE_STATIC (8);
};
struct AnchorFormat3
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id HB_UNUSED,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = font->em_scale_x (xCoordinate);
    *y = font->em_scale_y (yCoordinate);

    if (font->x_ppem)
      *x += (this+xDeviceTable).get_x_delta (font);
    if (font->y_ppem)
      *y += (this+yDeviceTable).get_y_delta (font);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
  }

  USHORT  format;       /* Format identifier--format = 3 */
  SHORT   xCoordinate;  /* Horizontal value--in design units */
  SHORT   yCoordinate;  /* Vertical value--in design units */
  OffsetTo<Device>
          xDeviceTable; /* Offset to Device table for X
                         * coordinate-- from beginning of
                         * Anchor table (may be NULL) */
  OffsetTo<Device>
          yDeviceTable; /* Offset to Device table for Y
                         * coordinate-- from beginning of
                         * Anchor table (may be NULL) */
  DEFINE_SIZE_STATIC (10);
};
struct Anchor
{
  inline void get_anchor (hb_font_t *font, hb_codepoint_t glyph_id,
                          hb_position_t *x, hb_position_t *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (font, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (font, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (font, glyph_id, x, y); return;
    default:                                             return;
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    case 3: return TRACE_RETURN (u.format3.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  union {
  USHORT         format;   /* Format identifier */
  AnchorFormat1  format1;
  AnchorFormat2  format2;
  AnchorFormat3  format3;
  } u;
  DEFINE_SIZE_UNION (2, format);
};
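

/* Illustrative note (not part of the original header): the three Anchor formats
 * refine the same point in different ways.  Format 1 is a bare design-space
 * point; format 2 may snap to a glyph contour point when the font is hinted
 * (non-zero x_ppem/y_ppem); format 3 adds per-ppem Device deltas.  All three
 * reduce to a single (x, y) position in font space through get_anchor (). */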
struct AnchorMatrix
{
  inline const Anchor& get_anchor (unsigned int row, unsigned int col, unsigned int cols, bool *found) const {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null(Anchor);
    *found = !matrix[row * cols + col].is_null ();
    return this+matrix[row * cols + col];
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return TRACE_RETURN (false);
    if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrix, matrix[0].static_size, count)) return TRACE_RETURN (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrix[i].sanitize (c, this)) return TRACE_RETURN (false);
    return TRACE_RETURN (true);
  }

  USHORT  rows;         /* Number of rows */
  OffsetTo<Anchor>
          matrix[VAR];  /* Matrix of offsets to Anchor tables--
                         * from beginning of AnchorMatrix table */
  DEFINE_SIZE_ARRAY (2, matrix);
};
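

/* Illustrative note (not part of the original header): AnchorMatrix is stored
 * row-major, so the anchor for (row, col) sits at matrix[row * cols + col].
 * For example, with cols == class_count == 3, row 2 and mark class 1 read
 * matrix[2 * 3 + 1] == matrix[7].  The column count is not stored in the table;
 * the caller passes it in (classCount in the Mark*Pos subtables below). */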
struct MarkRecord
{
  friend struct MarkArray;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  USHORT  klass;       /* Class defined for this mark */
  OffsetTo<Anchor>
          markAnchor;  /* Offset to Anchor table--from
                        * beginning of MarkArray table */
  DEFINE_SIZE_STATIC (4);
};
struct MarkArray : ArrayOf<MarkRecord>  /* Array of MarkRecords--in Coverage order */
{
  inline bool apply (hb_apply_context_t *c,
                     unsigned int mark_index, unsigned int glyph_index,
                     const AnchorMatrix &anchors, unsigned int class_count,
                     unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it. */
    if (unlikely (!found)) return TRACE_RETURN (false);

    hb_position_t mark_x, mark_y, base_x, base_y;

    mark_anchor.get_anchor (c->font, buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c->font, buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    hb_glyph_position_t &o = buffer->cur_pos();
    o.x_offset = base_x - mark_x;
    o.y_offset = base_y - mark_y;
    o.attach_lookback() = buffer->idx - glyph_pos;

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
  }
};
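

/* Illustrative note (not part of the original header): mark attachment records
 * the difference of the two anchors,
 *   x_offset = base_x - mark_x,  y_offset = base_y - mark_y,
 * plus how far back the attachment target sits (attach_lookback()).  Converting
 * that into an offset relative to the mark's own pen position happens later, in
 * fix_mark_attachment (), called from GPOS::position_finish (). */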
struct SinglePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             values, buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_value (c, this, values));
  }

  USHORT       format;       /* Format identifier--format = 1 */
  OffsetTo<Coverage>
               coverage;     /* Offset to Coverage table--from
                              * beginning of subtable */
  ValueFormat  valueFormat;  /* Defines the types of data in the
                              * ValueRecord */
  ValueRecord  values;       /* Defines positioning
                              * value(s)--applied to all glyphs in
                              * the Coverage table */
  DEFINE_SIZE_ARRAY (6, values);
};
struct SinglePosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (likely (index >= valueCount)) return TRACE_RETURN (false);

    valueFormat.apply_value (c->font, c->direction, this,
                             &values[index * valueFormat.get_len ()],
                             buffer->cur_pos());

    buffer->idx++;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && valueFormat.sanitize_values (c, this, values, valueCount));
  }

  USHORT       format;       /* Format identifier--format = 2 */
  OffsetTo<Coverage>
               coverage;     /* Offset to Coverage table--from
                              * beginning of subtable */
  ValueFormat  valueFormat;  /* Defines the types of data in the
                              * ValueRecord */
  USHORT       valueCount;   /* Number of ValueRecords */
  ValueRecord  values;       /* Array of ValueRecords--positioning
                              * values applied to glyphs */
  DEFINE_SIZE_ARRAY (8, values);
};
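

/* Illustrative note (not part of the original header): in format 2 the
 * ValueRecords are packed back to back, so the record for Coverage index i
 * starts at values[i * valueFormat.get_len ()].  With a two-flag ValueFormat
 * (get_len () == 2), the glyph at Coverage index 3 reads values[6] and values[7]. */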
struct SinglePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  union {
  USHORT            format;  /* Format identifier */
  SinglePosFormat1  format1;
  SinglePosFormat2  format2;
  } u;
};
struct PairValueRecord
{
  friend struct PairSet;

  GlyphID      secondGlyph;  /* GlyphID of second glyph in the
                              * pair--first glyph is listed in the
                              * Coverage table */
  ValueRecord  values;       /* Positioning data for the first glyph
                              * followed by for second glyph */
  DEFINE_SIZE_ARRAY (2, values);
};
struct PairSet
{
  friend struct PairPosFormat1;

  inline void collect_glyphs (hb_collect_glyphs_context_t *c,
                              const ValueFormat *valueFormats) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      c->input->add (record->secondGlyph);
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }
  }

  inline bool apply (hb_apply_context_t *c,
                     const ValueFormat *valueFormats,
                     unsigned int pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = USHORT::static_size * (1 + len1 + len2);

    const PairValueRecord *record = CastP<PairValueRecord> (array);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (buffer->info[pos].codepoint == record->secondGlyph)
      {
        valueFormats[0].apply_value (c->font, c->direction, this,
                                     &record->values[0], buffer->cur_pos());
        valueFormats[1].apply_value (c->font, c->direction, this,
                                     &record->values[len1], buffer->pos[pos]);
        if (len2)
          pos++;
        buffer->idx = pos;
        return TRACE_RETURN (true);
      }
      record = &StructAtOffset<PairValueRecord> (record, record_size);
    }

    return TRACE_RETURN (false);
  }

  struct sanitize_closure_t {
    void *base;
    ValueFormat *valueFormats;
    unsigned int len1; /* valueFormats[0].get_len() */
    unsigned int stride; /* 1 + len1 + len2 */
  };

  inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && c->check_array (array, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);

    unsigned int count = len;
    PairValueRecord *record = CastP<PairValueRecord> (array);
    return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
                      && closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
  }

  USHORT  len;         /* Number of PairValueRecords */
  USHORT  array[VAR];  /* Array of PairValueRecords--ordered
                        * by GlyphID of the second glyph */
  DEFINE_SIZE_ARRAY (2, array);
};
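

/* Illustrative note (not part of the original header): a PairValueRecord is not
 * a fixed-size C struct; it is one USHORT (secondGlyph) followed by len1 + len2
 * Values, so the byte stride is USHORT::static_size * (1 + len1 + len2) and the
 * records are walked with StructAtOffset<> rather than plain pointer arithmetic. */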
struct PairPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
    unsigned int count = pairSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+pairSet[i]).collect_glyphs (c, &valueFormat1);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    PairSet::sanitize_closure_t closure = {
      this,
      const_cast<ValueFormat *> (&valueFormat1),
      len1,
      1 + len1 + len2
    };

    return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
  }

  USHORT       format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
               coverage;      /* Offset to Coverage table--from
                               * beginning of subtable */
  ValueFormat  valueFormat1;  /* Defines the types of data in
                               * ValueRecord1--for the first glyph
                               * in the pair--may be zero (0) */
  ValueFormat  valueFormat2;  /* Defines the types of data in
                               * ValueRecord2--for the second glyph
                               * in the pair--may be zero (0) */
  OffsetArrayOf<PairSet>
               pairSet;       /* Array of PairSet tables
                               * ordered by Coverage Index */
  DEFINE_SIZE_ARRAY (10, pairSet);
};
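

/* Illustrative note (not part of the original header): this is the per-glyph
 * pair-kerning path.  The first glyph selects a PairSet through the Coverage
 * table, the skipping iterator finds the next glyph the lookup flags do not
 * skip, and PairSet::apply () searches that set for the second glyph. */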
struct PairPosFormat2
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    /* (this+coverage).add_coverage (c->input); // Don't need this. */

    unsigned int count1 = class1Count;
    const ClassDef &klass1 = this+classDef1;
    for (unsigned int i = 0; i < count1; i++)
      klass1.add_class (c->input, i);

    unsigned int count2 = class2Count;
    const ClassDef &klass2 = this+classDef2;
    for (unsigned int i = 0; i < count2; i++)
      klass2.add_class (c->input, i);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int record_len = len1 + len2;

    unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
    unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
    if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);

    const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
    valueFormat1.apply_value (c->font, c->direction, this,
                              v, buffer->cur_pos());
    valueFormat2.apply_value (c->font, c->direction, this,
                              v + len1, buffer->pos[skippy_iter.idx]);

    buffer->idx = skippy_iter.idx;
    if (len2)
      buffer->idx++;

    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && coverage.sanitize (c, this)
       && classDef1.sanitize (c, this)
       && classDef2.sanitize (c, this))) return TRACE_RETURN (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int stride = len1 + len2;
    unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
    unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
    return TRACE_RETURN (c->check_array (values, record_size, count) &&
                         valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
                         valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
  }

  USHORT       format;        /* Format identifier--format = 2 */
  OffsetTo<Coverage>
               coverage;      /* Offset to Coverage table--from
                               * beginning of subtable */
  ValueFormat  valueFormat1;  /* ValueRecord definition--for the
                               * first glyph of the pair--may be zero
                               * (0) */
  ValueFormat  valueFormat2;  /* ValueRecord definition--for the
                               * second glyph of the pair--may be
                               * zero (0) */
  OffsetTo<ClassDef>
               classDef1;     /* Offset to ClassDef table--from
                               * beginning of PairPos subtable--for
                               * the first glyph of the pair */
  OffsetTo<ClassDef>
               classDef2;     /* Offset to ClassDef table--from
                               * beginning of PairPos subtable--for
                               * the second glyph of the pair */
  USHORT       class1Count;   /* Number of classes in ClassDef1
                               * table--includes Class0 */
  USHORT       class2Count;   /* Number of classes in ClassDef2
                               * table--includes Class0 */
  ValueRecord  values;        /* Matrix of value pairs:
                               * class1-major, class2-minor,
                               * Each entry has value1 and value2 */
  DEFINE_SIZE_ARRAY (16, values);
};
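

/* Illustrative note (not part of the original header): format 2 stores a dense
 * class1Count x class2Count matrix of value pairs.  The pair for classes
 * (klass1, klass2) starts at values[(len1 + len2) * (klass1 * class2Count + klass2)];
 * the first len1 Values position the first glyph, the remaining len2 the second. */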
struct PairPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    case 2: return TRACE_RETURN (c->dispatch (u.format2));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    case 2: return TRACE_RETURN (u.format2.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  union {
  USHORT          format;  /* Format identifier */
  PairPosFormat1  format1;
  PairPosFormat2  format2;
  } u;
};
struct EntryExitRecord
{
  friend struct CursivePosFormat1;

  inline bool sanitize (hb_sanitize_context_t *c, void *base) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
  }

  OffsetTo<Anchor>
          entryAnchor;  /* Offset to EntryAnchor table--from
                         * beginning of CursivePos
                         * subtable--may be NULL */
  OffsetTo<Anchor>
          exitAnchor;   /* Offset to ExitAnchor table--from
                         * beginning of CursivePos
                         * subtable--may be NULL */
  DEFINE_SIZE_STATIC (4);
};
struct CursivePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;

    /* We don't handle mark glyphs here. */
    if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return TRACE_RETURN (false);

    hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, 1);
    if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);

    const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
    if (!this_record.exitAnchor) return TRACE_RETURN (false);

    if (!skippy_iter.next ()) return TRACE_RETURN (false);

    const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
    if (!next_record.entryAnchor) return TRACE_RETURN (false);

    unsigned int i = buffer->idx;
    unsigned int j = skippy_iter.idx;

    hb_position_t entry_x, entry_y, exit_x, exit_y;
    (this+this_record.exitAnchor).get_anchor (c->font, buffer->info[i].codepoint, &exit_x, &exit_y);
    (this+next_record.entryAnchor).get_anchor (c->font, buffer->info[j].codepoint, &entry_x, &entry_y);

    hb_glyph_position_t *pos = buffer->pos;

    hb_position_t d;
    /* Main-direction adjustment */
    switch (c->direction) {
      case HB_DIRECTION_LTR:
        pos[i].x_advance  = exit_x + pos[i].x_offset;

        d = entry_x + pos[j].x_offset;
        pos[j].x_advance -= d;
        pos[j].x_offset  -= d;
        break;
      case HB_DIRECTION_RTL:
        d = exit_x + pos[i].x_offset;
        pos[i].x_advance -= d;
        pos[i].x_offset  -= d;

        pos[j].x_advance  = entry_x + pos[j].x_offset;
        break;
      case HB_DIRECTION_TTB:
        pos[i].y_advance  = exit_y + pos[i].y_offset;

        d = entry_y + pos[j].y_offset;
        pos[j].y_advance -= d;
        pos[j].y_offset  -= d;
        break;
      case HB_DIRECTION_BTT:
        d = exit_y + pos[i].y_offset;
        pos[i].y_advance -= d;
        pos[i].y_offset  -= d;

        pos[j].y_advance  = entry_y;
        break;
      case HB_DIRECTION_INVALID:
      default:
        break;
    }

    /* Cross-direction adjustment */
    if (c->lookup_props & LookupFlag::RightToLeft) {
      pos[i].cursive_chain() = j - i;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[i].y_offset = entry_y - exit_y;
      else
        pos[i].x_offset = entry_x - exit_x;
    } else {
      pos[j].cursive_chain() = i - j;
      if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
        pos[j].y_offset = exit_y - entry_y;
      else
        pos[j].x_offset = exit_x - entry_x;
    }

    buffer->idx = j;
    return TRACE_RETURN (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
  }

  USHORT  format;           /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          coverage;         /* Offset to Coverage table--from
                             * beginning of subtable */
  ArrayOf<EntryExitRecord>
          entryExitRecord;  /* Array of EntryExit records--in
                             * Coverage Index order */
  DEFINE_SIZE_ARRAY (6, entryExitRecord);
};
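

/* Illustrative note (not part of the original header): cursive attachment pins
 * the exit anchor of one glyph to the entry anchor of the next.  The
 * main-direction part is resolved immediately by rewriting advances and
 * offsets; the cross-direction part is stored as a relative offset plus a
 * cursive_chain() link and resolved recursively by fix_cursive_minor_offset ()
 * at GPOS::position_finish () time. */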
struct CursivePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  union {
  USHORT             format;  /* Format identifier */
  CursivePosFormat1  format1;
  } u;
};
typedef AnchorMatrix BaseArray;   /* base-major--
                                   * in order of BaseCoverage Index--,
                                   * mark-minor--
                                   * ordered by class--zero-based. */
struct MarkBasePosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+baseCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    do {
      if (!skippy_iter.prev ()) return TRACE_RETURN (false);
      /* We only want to attach to the first of a MultipleSubst sequence.  Reject others. */
      if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break;
      skippy_iter.reject ();
    } while (1);

    /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
    if (base_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
  }

  USHORT  format;        /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          markCoverage;  /* Offset to MarkCoverage table--from
                          * beginning of MarkBasePos subtable */
  OffsetTo<Coverage>
          baseCoverage;  /* Offset to BaseCoverage table--from
                          * beginning of MarkBasePos subtable */
  USHORT  classCount;    /* Number of classes defined for marks */
  OffsetTo<MarkArray>
          markArray;     /* Offset to MarkArray table--from
                          * beginning of MarkBasePos subtable */
  OffsetTo<BaseArray>
          baseArray;     /* Offset to BaseArray table--from
                          * beginning of MarkBasePos subtable */
  DEFINE_SIZE_STATIC (12);
};
struct MarkBasePos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  union {
  USHORT              format;  /* Format identifier */
  MarkBasePosFormat1  format1;
  } u;
};
typedef AnchorMatrix LigatureAttach;   /* component-major--
                                        * in order of writing direction--,
                                        * mark-minor--
                                        * ordered by class--zero-based. */

typedef OffsetListOf<LigatureAttach> LigatureArray;
                                       /* Array of LigatureAttach
                                        * tables ordered by
                                        * LigatureCoverage Index */
struct MarkLigPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+markCoverage).add_coverage (c->input);
    (this+ligatureCoverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+markCoverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
    if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }

    unsigned int j = skippy_iter.idx;
    unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
    if (lig_index == NOT_COVERED) return TRACE_RETURN (false);

    const LigatureArray& lig_array = this+ligatureArray;
    const LigatureAttach& lig_attach = lig_array[lig_index];

    /* Find component to attach to */
    unsigned int comp_count = lig_attach.rows;
    if (unlikely (!comp_count)) return TRACE_RETURN (false);

    /* We must now check whether the ligature ID of the current mark glyph
     * is identical to the ligature ID of the found ligature.  If yes, we
     * can directly use the component index.  If not, we attach the mark
     * glyph to the last component of the ligature. */
    unsigned int comp_index;
    unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
    if (lig_id && lig_id == mark_id && mark_comp > 0)
      comp_index = MIN (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
    else
      comp_index = comp_count - 1;

    return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
                         markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
  }

  USHORT  format;            /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          markCoverage;      /* Offset to Mark Coverage table--from
                              * beginning of MarkLigPos subtable */
  OffsetTo<Coverage>
          ligatureCoverage;  /* Offset to Ligature Coverage
                              * table--from beginning of MarkLigPos
                              * subtable */
  USHORT  classCount;        /* Number of defined mark classes */
  OffsetTo<MarkArray>
          markArray;         /* Offset to MarkArray table--from
                              * beginning of MarkLigPos subtable */
  OffsetTo<LigatureArray>
          ligatureArray;     /* Offset to LigatureArray table--from
                              * beginning of MarkLigPos subtable */
  DEFINE_SIZE_STATIC (12);
};
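

/* Illustrative note (not part of the original header): a LigatureAttach has one
 * anchor row per ligature component, so a mark can land on the right part of,
 * say, a lam-alef ligature.  If the mark still carries the ligature's lig_id,
 * its lig_comp selects the row (clamped to comp_count); otherwise the mark
 * attaches to the last component. */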
struct MarkLigPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  union {
  USHORT             format;  /* Format identifier */
  MarkLigPosFormat1  format1;
  } u;
};
typedef AnchorMatrix Mark2Array;   /* mark2-major--
                                    * in order of Mark2Coverage Index--,
                                    * mark1-minor--
                                    * ordered by class--zero-based. */
struct MarkMarkPosFormat1
{
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+mark1Coverage).add_coverage (c->input);
    (this+mark2Coverage).add_coverage (c->input);
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+mark1Coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);

    /* now we search backwards for a suitable mark glyph until a non-mark glyph */
    hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, buffer->idx, 1);
    skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
    if (!skippy_iter.prev ()) return TRACE_RETURN (false);

    if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return TRACE_RETURN (false); }

    unsigned int j = skippy_iter.idx;

    unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
    unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);

    if (likely (id1 == id2)) {
      if (id1 == 0) /* Marks belonging to the same base. */
        goto good;
      else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
        goto good;
    } else {
      /* If ligature ids don't match, it may be the case that one of the marks
       * itself is a ligature.  In which case match. */
      if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
        goto good;
    }

    /* Didn't match. */
    return TRACE_RETURN (false);

    good:
    unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
    if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);

    return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
                         mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
                         && mark2Array.sanitize (c, this, (unsigned int) classCount));
  }

  USHORT  format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
          mark1Coverage;  /* Offset to Combining Mark1 Coverage
                           * table--from beginning of MarkMarkPos
                           * subtable */
  OffsetTo<Coverage>
          mark2Coverage;  /* Offset to Combining Mark2 Coverage
                           * table--from beginning of MarkMarkPos
                           * subtable */
  USHORT  classCount;     /* Number of defined mark classes */
  OffsetTo<MarkArray>
          mark1Array;     /* Offset to Mark1Array table--from
                           * beginning of MarkMarkPos subtable */
  OffsetTo<Mark2Array>
          mark2Array;     /* Offset to Mark2Array table--from
                           * beginning of MarkMarkPos subtable */
  DEFINE_SIZE_STATIC (12);
};
struct MarkMarkPos
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    switch (u.format) {
    case 1: return TRACE_RETURN (c->dispatch (u.format1));
    default:return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return TRACE_RETURN (false);
    switch (u.format) {
    case 1: return TRACE_RETURN (u.format1.sanitize (c));
    default:return TRACE_RETURN (true);
    }
  }

  union {
  USHORT              format;  /* Format identifier */
  MarkMarkPosFormat1  format1;
  } u;
};
struct ContextPos : Context {};

struct ChainContextPos : ChainContext {};

struct ExtensionPos : Extension<ExtensionPos>
{
  typedef struct PosLookupSubTable LookupSubTable;
};
struct PosLookupSubTable
{
  friend struct PosLookup;

  enum Type {
    Single        = 1,
    Pair          = 2,
    Cursive       = 3,
    MarkBase      = 4,
    MarkLig       = 5,
    MarkMark      = 6,
    Context       = 7,
    ChainContext  = 8,
    Extension     = 9
  };

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type) const
  {
    TRACE_DISPATCH (this);
    switch (lookup_type) {
    case Single:        return TRACE_RETURN (u.single.dispatch (c));
    case Pair:          return TRACE_RETURN (u.pair.dispatch (c));
    case Cursive:       return TRACE_RETURN (u.cursive.dispatch (c));
    case MarkBase:      return TRACE_RETURN (u.markBase.dispatch (c));
    case MarkLig:       return TRACE_RETURN (u.markLig.dispatch (c));
    case MarkMark:      return TRACE_RETURN (u.markMark.dispatch (c));
    case Context:       return TRACE_RETURN (u.context.dispatch (c));
    case ChainContext:  return TRACE_RETURN (u.chainContext.dispatch (c));
    case Extension:     return TRACE_RETURN (u.extension.dispatch (c));
    default:            return TRACE_RETURN (c->default_return_value ());
    }
  }

  inline bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) {
    TRACE_SANITIZE (this);
    if (!u.header.sub_format.sanitize (c))
      return TRACE_RETURN (false);
    switch (lookup_type) {
    case Single:        return TRACE_RETURN (u.single.sanitize (c));
    case Pair:          return TRACE_RETURN (u.pair.sanitize (c));
    case Cursive:       return TRACE_RETURN (u.cursive.sanitize (c));
    case MarkBase:      return TRACE_RETURN (u.markBase.sanitize (c));
    case MarkLig:       return TRACE_RETURN (u.markLig.sanitize (c));
    case MarkMark:      return TRACE_RETURN (u.markMark.sanitize (c));
    case Context:       return TRACE_RETURN (u.context.sanitize (c));
    case ChainContext:  return TRACE_RETURN (u.chainContext.sanitize (c));
    case Extension:     return TRACE_RETURN (u.extension.sanitize (c));
    default:            return TRACE_RETURN (true);
    }
  }

  union {
  struct {
    USHORT          sub_format;
  } header;
  SinglePos         single;
  PairPos           pair;
  CursivePos        cursive;
  MarkBasePos       markBase;
  MarkLigPos        markLig;
  MarkMarkPos       markMark;
  ContextPos        context;
  ChainContextPos   chainContext;
  ExtensionPos      extension;
  } u;
  DEFINE_SIZE_UNION (2, header.sub_format);
};
struct PosLookup : Lookup
{
  inline const PosLookupSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; }

  inline bool is_reverse (void) const
  {
    return false;
  }

  inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    c->set_recurse_func (NULL);
    return TRACE_RETURN (dispatch (c));
  }

  template <typename set_t>
  inline void add_coverage (set_t *glyphs) const
  {
    hb_get_coverage_context_t c;
    const Coverage *last = NULL;
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      const Coverage *coverage = &get_subtable (i).dispatch (&c, get_type ());
      if (coverage != last) {
        coverage->add_coverage (glyphs);
        last = coverage;
      }
    }
  }

  inline bool apply_once (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (!c->check_glyph_property (&c->buffer->cur(), c->lookup_props))
      return TRACE_RETURN (false);
    return TRACE_RETURN (dispatch (c));
  }

  static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);

  template <typename context_t>
  static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this);
    unsigned int lookup_type = get_type ();
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable (i).dispatch (c, lookup_type);
      if (c->stop_sublookup_iteration (r))
        return TRACE_RETURN (r);
    }
    return TRACE_RETURN (c->default_return_value ());
  }

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
    OffsetArrayOf<PosLookupSubTable> &list = CastR<OffsetArrayOf<PosLookupSubTable> > (subTable);
    return TRACE_RETURN (list.sanitize (c, this, get_type ()));
  }
};

typedef OffsetListOf<PosLookup> PosLookupList;
/*
 * GPOS -- The Glyph Positioning Table
 */

struct GPOS : GSUBGPOS
{
  static const hb_tag_t tableTag = HB_OT_TAG_GPOS;

  inline const PosLookup& get_lookup (unsigned int i) const
  { return CastR<PosLookup> (GSUBGPOS::get_lookup (i)); }

  static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void position_finish (hb_font_t *font, hb_buffer_t *buffer);

  inline bool sanitize (hb_sanitize_context_t *c) {
    TRACE_SANITIZE (this);
    if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
    OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
    return TRACE_RETURN (list.sanitize (c, this));
  }

  DEFINE_SIZE_STATIC (10);
};
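

/* Illustrative note (not part of the original header): a GPOS pass over a
 * buffer is bracketed by position_start (), which zeroes the attach_lookback()
 * and cursive_chain() buffer vars, and position_finish (), which resolves
 * cursive chains and mark attachments into final x_offset/y_offset values.
 * In between, PosLookup::apply_once () is driven by the shared lookup machinery
 * from hb-ot-layout-gsubgpos-private.hh. */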
static void
fix_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  unsigned int j = pos[i].cursive_chain();
  if (likely (!j))
    return;

  j += i;

  pos[i].cursive_chain() = 0;

  fix_cursive_minor_offset (pos, j, direction);

  if (HB_DIRECTION_IS_HORIZONTAL (direction))
    pos[i].y_offset += pos[j].y_offset;
  else
    pos[i].x_offset += pos[j].x_offset;
}
static void
fix_mark_attachment (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction)
{
  if (likely (!(pos[i].attach_lookback())))
    return;

  unsigned int j = i - pos[i].attach_lookback();

  pos[i].x_offset += pos[j].x_offset;
  pos[i].y_offset += pos[j].y_offset;

  if (HB_DIRECTION_IS_FORWARD (direction))
    for (unsigned int k = j; k < i; k++) {
      pos[i].x_offset -= pos[k].x_advance;
      pos[i].y_offset -= pos[k].y_advance;
    }
  else
    for (unsigned int k = j + 1; k < i + 1; k++) {
      pos[i].x_offset += pos[k].x_advance;
      pos[i].y_offset += pos[k].y_advance;
    }
}
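

/* Illustrative worked example (not part of the original header): in a
 * forward-direction run with the base at j and a mark at i == j + 2,
 * fix_mark_attachment () computes
 *   x_offset(i) += x_offset(j) - x_advance(j) - x_advance(j + 1)
 * i.e. the stored anchor difference is re-expressed relative to the mark's own
 * pen position by subtracting the advances of the glyphs in between. */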
void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  buffer->clear_positions ();

  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
    buffer->pos[i].attach_lookback() = buffer->pos[i].cursive_chain() = 0;
}
void
GPOS::position_finish (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  unsigned int len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  hb_direction_t direction = buffer->props.direction;

  /* Handle cursive connections */
  for (unsigned int i = 0; i < len; i++)
    fix_cursive_minor_offset (pos, i, direction);

  /* Handle attachments */
  for (unsigned int i = 0; i < len; i++)
    fix_mark_attachment (pos, i, direction);

  _hb_buffer_deallocate_gsubgpos_vars (buffer);
}
/* Out-of-class implementation for methods recursing */

template <typename context_t>
inline typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  return l.dispatch (c);
}

inline bool PosLookup::apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index)
{
  const GPOS &gpos = *(hb_ot_layout_from_face (c->face)->gpos);
  const PosLookup &l = gpos.get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  c->set_lookup (l);
  bool ret = l.apply_once (c);
  c->lookup_props = saved_lookup_props;
  return ret;
}


#undef attach_lookback
#undef cursive_chain


} /* namespace OT */
#endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */