/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */
29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
30 #define HB_OPEN_TYPE_PRIVATE_HH
32 #include "hb-private.hh"
/* Cast to struct T, reference to reference.
 * Reinterprets the referenced bytes as a Type; no conversion is performed. */
template<typename Type, typename TObject>
inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }
/* Cast to struct T, pointer to pointer.
 * Reinterprets the pointed-to bytes as a Type; no conversion is performed. */
template<typename Type, typename TObject>
inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }
/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes.
 * Note: the caller is responsible for P+offset being a valid, suitably
 * aligned location for a Type. */
template<typename Type>
inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }
70 /* StructAfter<T>(X) returns the struct T& that is placed after X.
71 * Works with X of variable size also. X must implement get_size() */
72 template<typename Type
, typename TObject
>
73 inline const Type
& StructAfter(const TObject
&X
)
74 { return StructAtOffset
<Type
>(&X
, X
.get_size()); }
75 template<typename Type
, typename TObject
>
76 inline Type
& StructAfter(TObject
&X
)
77 { return StructAtOffset
<Type
>(&X
, X
.get_size()); }
/* Check _assertion in a method environment.  Expands to a dummy const
 * method whose name embeds the source line, so multiple assertions in
 * one struct do not collide. */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment (never executed). */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
/* Size-defining macros: each declares min_size (and, where fixed,
 * static_size) plus a compile-time check that the declared size matches
 * the struct layout. */
#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size)

/* Size signifying variable-sized array */
#define VAR 1

#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
135 /* Global nul-content Null pool. Enlarge as necessary. */
136 /* TODO This really should be a extern HB_INTERNAL and defined somewhere... */
137 static const void *_NullPool
[64 / sizeof (void *)];
139 /* Generic nul-content Null objects. */
140 template <typename Type
>
141 static inline const Type
& Null (void) {
142 ASSERT_STATIC (sizeof (Type
) <= sizeof (_NullPool
));
143 return *CastP
<Type
> (_NullPool
);
/* Specialization for arbitrary-content arbitrary-sized Null objects.
 * `data` must contain at least Type::min_size meaningful bytes; the
 * trailing ASSERT_STATIC enforces that at compile time. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro. */
#define Null(Type) Null<Type>()
#ifndef HB_DEBUG_SANITIZE
#define HB_DEBUG_SANITIZE (HB_DEBUG+0)
#endif

/* Per-method trace helper for sanitize() implementations; expects a
 * hb_sanitize_context_t* named `c` in scope. */
#define TRACE_SANITIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 100
#endif
179 struct hb_sanitize_context_t
181 inline const char *get_name (void) { return "SANITIZE"; }
182 static const unsigned int max_debug_depth
= HB_DEBUG_SANITIZE
;
183 typedef bool return_t
;
184 template <typename T
>
185 inline return_t
dispatch (const T
&obj
) { return obj
.sanitize (this); }
186 static return_t
default_return_value (void) { return true; }
187 bool stop_sublookup_iteration (const return_t r HB_UNUSED
) const { return false; }
189 inline void init (hb_blob_t
*b
)
191 this->blob
= hb_blob_reference (b
);
192 this->writable
= false;
195 inline void start_processing (void)
197 this->start
= hb_blob_get_data (this->blob
, NULL
);
198 this->end
= this->start
+ hb_blob_get_length (this->blob
);
199 this->edit_count
= 0;
200 this->debug_depth
= 0;
202 DEBUG_MSG_LEVEL (SANITIZE
, this->blob
, 0, +1,
203 "start [%p..%p] (%lu bytes)",
204 this->start
, this->end
,
205 (unsigned long) (this->end
- this->start
));
208 inline void end_processing (void)
210 DEBUG_MSG_LEVEL (SANITIZE
, this->blob
, 0, -1,
211 "end [%p..%p] %u edit requests",
212 this->start
, this->end
, this->edit_count
);
214 hb_blob_destroy (this->blob
);
216 this->start
= this->end
= NULL
;
219 inline bool check_range (const void *base
, unsigned int len
) const
221 const char *p
= (const char *) base
;
223 hb_auto_trace_t
<HB_DEBUG_SANITIZE
, bool> trace
224 (&this->debug_depth
, "SANITIZE", this->blob
, NULL
,
225 "check_range [%p..%p] (%d bytes) in [%p..%p]",
227 this->start
, this->end
);
229 return TRACE_RETURN (likely (this->start
<= p
&& p
<= this->end
&& (unsigned int) (this->end
- p
) >= len
));
232 inline bool check_array (const void *base
, unsigned int record_size
, unsigned int len
) const
234 const char *p
= (const char *) base
;
235 bool overflows
= _hb_unsigned_int_mul_overflows (len
, record_size
);
237 hb_auto_trace_t
<HB_DEBUG_SANITIZE
, bool> trace
238 (&this->debug_depth
, "SANITIZE", this->blob
, NULL
,
239 "check_array [%p..%p] (%d*%d=%ld bytes) in [%p..%p]",
240 p
, p
+ (record_size
* len
), record_size
, len
, (unsigned long) record_size
* len
,
241 this->start
, this->end
);
243 return TRACE_RETURN (likely (!overflows
&& this->check_range (base
, record_size
* len
)));
246 template <typename Type
>
247 inline bool check_struct (const Type
*obj
) const
249 return likely (this->check_range (obj
, obj
->min_size
));
252 inline bool may_edit (const void *base HB_UNUSED
, unsigned int len HB_UNUSED
)
254 if (this->edit_count
>= HB_SANITIZE_MAX_EDITS
)
257 const char *p
= (const char *) base
;
260 hb_auto_trace_t
<HB_DEBUG_SANITIZE
, bool> trace
261 (&this->debug_depth
, "SANITIZE", this->blob
, NULL
,
262 "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
265 this->start
, this->end
,
266 this->writable
? "GRANTED" : "DENIED");
268 return TRACE_RETURN (this->writable
);
271 mutable unsigned int debug_depth
;
272 const char *start
, *end
;
274 unsigned int edit_count
;
280 /* Template to sanitize an object. */
281 template <typename Type
>
284 static hb_blob_t
*sanitize (hb_blob_t
*blob
) {
285 hb_sanitize_context_t c
[1] = {{0}};
288 /* TODO is_sane() stuff */
293 DEBUG_MSG_FUNC (SANITIZE
, blob
, "start");
295 c
->start_processing ();
297 if (unlikely (!c
->start
)) {
298 c
->end_processing ();
302 Type
*t
= CastP
<Type
> (const_cast<char *> (c
->start
));
304 sane
= t
->sanitize (c
);
307 DEBUG_MSG_FUNC (SANITIZE
, blob
, "passed first round with %d edits; going for second round", c
->edit_count
);
309 /* sanitize again to ensure no toe-stepping */
311 sane
= t
->sanitize (c
);
313 DEBUG_MSG_FUNC (SANITIZE
, blob
, "requested %d edits in second round; FAILLING", c
->edit_count
);
318 unsigned int edit_count
= c
->edit_count
;
319 if (edit_count
&& !c
->writable
) {
320 c
->start
= hb_blob_get_data_writable (blob
, NULL
);
321 c
->end
= c
->start
+ hb_blob_get_length (blob
);
325 /* ok, we made it writable by relocating. try again */
326 DEBUG_MSG_FUNC (SANITIZE
, blob
, "retry");
332 c
->end_processing ();
334 DEBUG_MSG_FUNC (SANITIZE
, blob
, sane
? "PASSED" : "FAILED");
338 hb_blob_destroy (blob
);
339 return hb_blob_get_empty ();
343 static const Type
* lock_instance (hb_blob_t
*blob
) {
344 hb_blob_make_immutable (blob
);
345 const char *base
= hb_blob_get_data (blob
, NULL
);
346 return unlikely (!base
) ? &Null(Type
) : CastP
<Type
> (base
);
#ifndef HB_DEBUG_SERIALIZE
#define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
#endif

/* Per-method trace helper for serialize() implementations; expects a
 * hb_serialize_context_t* named `c` in scope. */
#define TRACE_SERIALIZE(this) \
	hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
	(&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
	 "");
367 struct hb_serialize_context_t
369 inline hb_serialize_context_t (void *start
, unsigned int size
)
371 this->start
= (char *) start
;
372 this->end
= this->start
+ size
;
374 this->ran_out_of_room
= false;
375 this->head
= this->start
;
376 this->debug_depth
= 0;
379 template <typename Type
>
380 inline Type
*start_serialize (void)
382 DEBUG_MSG_LEVEL (SERIALIZE
, this->start
, 0, +1,
383 "start [%p..%p] (%lu bytes)",
384 this->start
, this->end
,
385 (unsigned long) (this->end
- this->start
));
387 return start_embed
<Type
> ();
390 inline void end_serialize (void)
392 DEBUG_MSG_LEVEL (SERIALIZE
, this->start
, 0, -1,
393 "end [%p..%p] serialized %d bytes; %s",
394 this->start
, this->end
,
395 (int) (this->head
- this->start
),
396 this->ran_out_of_room
? "RAN OUT OF ROOM" : "did not ran out of room");
400 template <typename Type
>
401 inline Type
*copy (void)
403 assert (!this->ran_out_of_room
);
404 unsigned int len
= this->head
- this->start
;
405 void *p
= malloc (len
);
407 memcpy (p
, this->start
, len
);
408 return reinterpret_cast<Type
*> (p
);
411 template <typename Type
>
412 inline Type
*allocate_size (unsigned int size
)
414 if (unlikely (this->ran_out_of_room
|| this->end
- this->head
< ptrdiff_t (size
))) {
415 this->ran_out_of_room
= true;
418 memset (this->head
, 0, size
);
419 char *ret
= this->head
;
421 return reinterpret_cast<Type
*> (ret
);
424 template <typename Type
>
425 inline Type
*allocate_min (void)
427 return this->allocate_size
<Type
> (Type::min_size
);
430 template <typename Type
>
431 inline Type
*start_embed (void)
433 Type
*ret
= reinterpret_cast<Type
*> (this->head
);
437 template <typename Type
>
438 inline Type
*embed (const Type
&obj
)
440 unsigned int size
= obj
.get_size ();
441 Type
*ret
= this->allocate_size
<Type
> (size
);
442 if (unlikely (!ret
)) return NULL
;
443 memcpy (ret
, obj
, size
);
447 template <typename Type
>
448 inline Type
*extend_min (Type
&obj
)
450 unsigned int size
= obj
.min_size
;
451 assert (this->start
<= (char *) &obj
&& (char *) &obj
<= this->head
&& (char *) &obj
+ size
>= this->head
);
452 if (unlikely (!this->allocate_size
<Type
> (((char *) &obj
) + size
- this->head
))) return NULL
;
453 return reinterpret_cast<Type
*> (&obj
);
456 template <typename Type
>
457 inline Type
*extend (Type
&obj
)
459 unsigned int size
= obj
.get_size ();
460 assert (this->start
< (char *) &obj
&& (char *) &obj
<= this->head
&& (char *) &obj
+ size
>= this->head
);
461 if (unlikely (!this->allocate_size
<Type
> (((char *) &obj
) + size
- this->head
))) return NULL
;
462 return reinterpret_cast<Type
*> (&obj
);
465 inline void truncate (void *head
)
467 assert (this->start
< head
&& head
<= this->head
);
468 this->head
= (char *) head
;
471 unsigned int debug_depth
;
472 char *start
, *end
, *head
;
473 bool ran_out_of_room
;
476 template <typename Type
>
479 inline Supplier (const Type
*array
, unsigned int len_
)
484 inline const Type
operator [] (unsigned int i
) const
486 if (unlikely (i
>= len
)) return Type ();
490 inline void advance (unsigned int count
)
492 if (unlikely (count
> len
))
499 inline Supplier (const Supplier
<Type
> &); /* Disallow copy */
500 inline Supplier
<Type
>& operator= (const Supplier
<Type
> &); /* Disallow copy */
/*
 * The OpenType Font File: Data Types
 */

/* "The following data types are used in the OpenType font file.
 *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
523 template <typename Type
, int Bytes
> struct BEInt
;
525 template <typename Type
>
526 struct BEInt
<Type
, 2>
529 inline void set (Type i
) { hb_be_uint16_put (v
,i
); }
530 inline operator Type (void) const { return hb_be_uint16_get (v
); }
531 inline bool operator == (const BEInt
<Type
, 2>& o
) const { return hb_be_uint16_eq (v
, o
.v
); }
532 inline bool operator != (const BEInt
<Type
, 2>& o
) const { return !(*this == o
); }
533 private: uint8_t v
[2];
535 template <typename Type
>
536 struct BEInt
<Type
, 4>
539 inline void set (Type i
) { hb_be_uint32_put (v
,i
); }
540 inline operator Type (void) const { return hb_be_uint32_get (v
); }
541 inline bool operator == (const BEInt
<Type
, 4>& o
) const { return hb_be_uint32_eq (v
, o
.v
); }
542 inline bool operator != (const BEInt
<Type
, 4>& o
) const { return !(*this == o
); }
543 private: uint8_t v
[4];
545 template <typename Type
>
546 struct BEInt
<Type
, 3>
549 inline void set (Type i
) { hb_be_uint24_put (v
,i
); }
550 inline operator Type (void) const { return hb_be_uint24_get (v
); }
551 inline bool operator == (const BEInt
<Type
, 3>& o
) const { return hb_be_uint24_eq (v
, o
.v
); }
552 inline bool operator != (const BEInt
<Type
, 3>& o
) const { return !(*this == o
); }
553 private: uint8_t v
[3];
556 /* Integer types in big-endian order and no alignment requirement */
557 template <typename Type
, unsigned int Size
>
560 inline void set (Type i
) { v
.set (i
); }
561 inline operator Type(void) const { return v
; }
562 inline bool operator == (const IntType
<Type
,Size
> &o
) const { return v
== o
.v
; }
563 inline bool operator != (const IntType
<Type
,Size
> &o
) const { return v
!= o
.v
; }
564 static inline int cmp (const IntType
<Type
,Size
> *a
, const IntType
<Type
,Size
> *b
) { return b
->cmp (*a
); }
565 inline int cmp (IntType
<Type
,Size
> va
) const { Type a
= va
; Type b
= v
; return a
< b
? -1 : a
== b
? 0 : +1; }
566 inline int cmp (Type a
) const { Type b
= v
; return a
< b
? -1 : a
== b
? 0 : +1; }
567 inline bool sanitize (hb_sanitize_context_t
*c
) {
568 TRACE_SANITIZE (this);
569 return TRACE_RETURN (likely (c
->check_struct (this)));
574 DEFINE_SIZE_STATIC (Size
);
577 typedef IntType
<uint16_t, 2> USHORT
; /* 16-bit unsigned integer. */
578 typedef IntType
<int16_t, 2> SHORT
; /* 16-bit signed integer. */
579 typedef IntType
<uint32_t, 4> ULONG
; /* 32-bit unsigned integer. */
580 typedef IntType
<int32_t, 4> LONG
; /* 32-bit signed integer. */
581 typedef IntType
<uint32_t, 3> UINT24
; /* 24-bit unsigned integer. */
583 /* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
586 /* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
587 typedef USHORT UFWORD
;
589 /* Date represented in number of seconds since 12:00 midnight, January 1,
590 * 1904. The value is represented as a signed 64-bit integer. */
593 inline bool sanitize (hb_sanitize_context_t
*c
) {
594 TRACE_SANITIZE (this);
595 return TRACE_RETURN (likely (c
->check_struct (this)));
601 DEFINE_SIZE_STATIC (8);
604 /* Array of four uint8s (length = 32 bits) used to identify a script, language
605 * system, feature, or baseline */
608 /* What the char* converters return is NOT nul-terminated. Print using "%.4s" */
609 inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v
); }
610 inline operator char* (void) { return reinterpret_cast<char *> (&this->v
); }
612 DEFINE_SIZE_STATIC (4);
614 DEFINE_NULL_DATA (Tag
, " ");
616 /* Glyph index number, same as uint16 (length = 16 bits) */
617 typedef USHORT GlyphID
;
619 /* Script/language-system/feature index */
620 struct Index
: USHORT
{
621 static const unsigned int NOT_FOUND_INDEX
= 0xFFFF;
623 DEFINE_NULL_DATA (Index
, "\xff\xff");
625 /* Offset to a table, same as uint16 (length = 16 bits), Null offset = 0x0000 */
626 struct Offset
: USHORT
628 inline bool is_null (void) const { return 0 == *this; }
630 DEFINE_SIZE_STATIC (2);
633 /* LongOffset to a table, same as uint32 (length = 32 bits), Null offset = 0x00000000 */
634 struct LongOffset
: ULONG
636 inline bool is_null (void) const { return 0 == *this; }
638 DEFINE_SIZE_STATIC (4);
643 struct CheckSum
: ULONG
645 /* This is reference implementation from the spec. */
646 static inline uint32_t CalcTableChecksum (const ULONG
*Table
, uint32_t Length
)
649 const ULONG
*EndPtr
= Table
+((Length
+3) & ~3) / ULONG::static_size
;
651 while (Table
< EndPtr
)
656 /* Note: data should be 4byte aligned and have 4byte padding at the end. */
657 inline void set_for_data (const void *data
, unsigned int length
)
658 { set (CalcTableChecksum ((const ULONG
*) data
, length
)); }
661 DEFINE_SIZE_STATIC (4);
671 inline uint32_t to_int (void) const { return (major
<< 16) + minor
; }
673 inline bool sanitize (hb_sanitize_context_t
*c
) {
674 TRACE_SANITIZE (this);
675 return TRACE_RETURN (c
->check_struct (this));
681 DEFINE_SIZE_STATIC (4);
687 * Template subclasses of Offset and LongOffset that do the dereferencing.
691 template <typename OffsetType
, typename Type
>
692 struct GenericOffsetTo
: OffsetType
694 inline const Type
& operator () (const void *base
) const
696 unsigned int offset
= *this;
697 if (unlikely (!offset
)) return Null(Type
);
698 return StructAtOffset
<Type
> (base
, offset
);
701 inline Type
& serialize (hb_serialize_context_t
*c
, void *base
)
703 Type
*t
= c
->start_embed
<Type
> ();
704 this->set ((char *) t
- (char *) base
); /* TODO(serialize) Overflow? */
708 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
) {
709 TRACE_SANITIZE (this);
710 if (unlikely (!c
->check_struct (this))) return TRACE_RETURN (false);
711 unsigned int offset
= *this;
712 if (unlikely (!offset
)) return TRACE_RETURN (true);
713 Type
&obj
= StructAtOffset
<Type
> (base
, offset
);
714 return TRACE_RETURN (likely (obj
.sanitize (c
)) || neuter (c
));
716 template <typename T
>
717 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
, T user_data
) {
718 TRACE_SANITIZE (this);
719 if (unlikely (!c
->check_struct (this))) return TRACE_RETURN (false);
720 unsigned int offset
= *this;
721 if (unlikely (!offset
)) return TRACE_RETURN (true);
722 Type
&obj
= StructAtOffset
<Type
> (base
, offset
);
723 return TRACE_RETURN (likely (obj
.sanitize (c
, user_data
)) || neuter (c
));
726 inline bool try_set (hb_sanitize_context_t
*c
, const OffsetType
&v
) {
727 if (c
->may_edit (this, this->static_size
)) {
733 /* Set the offset to Null */
734 inline bool neuter (hb_sanitize_context_t
*c
) {
735 if (c
->may_edit (this, this->static_size
)) {
736 this->set (0); /* 0 is Null offset */
742 template <typename Base
, typename OffsetType
, typename Type
>
743 inline const Type
& operator + (const Base
&base
, const GenericOffsetTo
<OffsetType
, Type
> &offset
) { return offset (base
); }
744 template <typename Base
, typename OffsetType
, typename Type
>
745 inline Type
& operator + (Base
&base
, GenericOffsetTo
<OffsetType
, Type
> &offset
) { return offset (base
); }
747 template <typename Type
>
748 struct OffsetTo
: GenericOffsetTo
<Offset
, Type
> {};
750 template <typename Type
>
751 struct LongOffsetTo
: GenericOffsetTo
<LongOffset
, Type
> {};
758 template <typename LenType
, typename Type
>
759 struct GenericArrayOf
761 const Type
*sub_array (unsigned int start_offset
, unsigned int *pcount
/* IN/OUT */) const
763 unsigned int count
= len
;
764 if (unlikely (start_offset
> count
))
767 count
-= start_offset
;
768 count
= MIN (count
, *pcount
);
770 return array
+ start_offset
;
773 inline const Type
& operator [] (unsigned int i
) const
775 if (unlikely (i
>= len
)) return Null(Type
);
778 inline Type
& operator [] (unsigned int i
)
782 inline unsigned int get_size (void) const
783 { return len
.static_size
+ len
* Type::static_size
; }
785 inline bool serialize (hb_serialize_context_t
*c
,
786 unsigned int items_len
)
788 TRACE_SERIALIZE (this);
789 if (unlikely (!c
->extend_min (*this))) return TRACE_RETURN (false);
790 len
.set (items_len
); /* TODO(serialize) Overflow? */
791 if (unlikely (!c
->extend (*this))) return TRACE_RETURN (false);
792 return TRACE_RETURN (true);
795 inline bool serialize (hb_serialize_context_t
*c
,
796 Supplier
<Type
> &items
,
797 unsigned int items_len
)
799 TRACE_SERIALIZE (this);
800 if (unlikely (!serialize (c
, items_len
))) return TRACE_RETURN (false);
801 for (unsigned int i
= 0; i
< items_len
; i
++)
803 items
.advance (items_len
);
804 return TRACE_RETURN (true);
807 inline bool sanitize (hb_sanitize_context_t
*c
) {
808 TRACE_SANITIZE (this);
809 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
811 /* Note: for structs that do not reference other structs,
812 * we do not need to call their sanitize() as we already did
813 * a bound check on the aggregate array size. We just include
814 * a small unreachable expression to make sure the structs
815 * pointed to do have a simple sanitize(), ie. they do not
816 * reference other structs via offsets.
818 (void) (false && array
[0].sanitize (c
));
820 return TRACE_RETURN (true);
822 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
) {
823 TRACE_SANITIZE (this);
824 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
825 unsigned int count
= len
;
826 for (unsigned int i
= 0; i
< count
; i
++)
827 if (unlikely (!array
[i
].sanitize (c
, base
)))
828 return TRACE_RETURN (false);
829 return TRACE_RETURN (true);
831 template <typename T
>
832 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
, T user_data
) {
833 TRACE_SANITIZE (this);
834 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
835 unsigned int count
= len
;
836 for (unsigned int i
= 0; i
< count
; i
++)
837 if (unlikely (!array
[i
].sanitize (c
, base
, user_data
)))
838 return TRACE_RETURN (false);
839 return TRACE_RETURN (true);
843 inline bool sanitize_shallow (hb_sanitize_context_t
*c
) {
844 TRACE_SANITIZE (this);
845 return TRACE_RETURN (c
->check_struct (this) && c
->check_array (this, Type::static_size
, len
));
852 DEFINE_SIZE_ARRAY (sizeof (LenType
), array
);
855 /* An array with a USHORT number of elements. */
856 template <typename Type
>
857 struct ArrayOf
: GenericArrayOf
<USHORT
, Type
> {};
859 /* An array with a ULONG number of elements. */
860 template <typename Type
>
861 struct LongArrayOf
: GenericArrayOf
<ULONG
, Type
> {};
863 /* Array of Offset's */
864 template <typename Type
>
865 struct OffsetArrayOf
: ArrayOf
<OffsetTo
<Type
> > {};
867 /* Array of LongOffset's */
868 template <typename Type
>
869 struct LongOffsetArrayOf
: ArrayOf
<LongOffsetTo
<Type
> > {};
871 /* LongArray of LongOffset's */
872 template <typename Type
>
873 struct LongOffsetLongArrayOf
: LongArrayOf
<LongOffsetTo
<Type
> > {};
875 /* Array of offsets relative to the beginning of the array itself. */
876 template <typename Type
>
877 struct OffsetListOf
: OffsetArrayOf
<Type
>
879 inline const Type
& operator [] (unsigned int i
) const
881 if (unlikely (i
>= this->len
)) return Null(Type
);
882 return this+this->array
[i
];
885 inline bool sanitize (hb_sanitize_context_t
*c
) {
886 TRACE_SANITIZE (this);
887 return TRACE_RETURN (OffsetArrayOf
<Type
>::sanitize (c
, this));
889 template <typename T
>
890 inline bool sanitize (hb_sanitize_context_t
*c
, T user_data
) {
891 TRACE_SANITIZE (this);
892 return TRACE_RETURN (OffsetArrayOf
<Type
>::sanitize (c
, this, user_data
));
897 /* An array with a USHORT number of elements,
898 * starting at second element. */
899 template <typename Type
>
900 struct HeadlessArrayOf
902 inline const Type
& operator [] (unsigned int i
) const
904 if (unlikely (i
>= len
|| !i
)) return Null(Type
);
907 inline unsigned int get_size (void) const
908 { return len
.static_size
+ (len
? len
- 1 : 0) * Type::static_size
; }
910 inline bool serialize (hb_serialize_context_t
*c
,
911 Supplier
<Type
> &items
,
912 unsigned int items_len
)
914 TRACE_SERIALIZE (this);
915 if (unlikely (!c
->extend_min (*this))) return TRACE_RETURN (false);
916 len
.set (items_len
); /* TODO(serialize) Overflow? */
917 if (unlikely (!items_len
)) return TRACE_RETURN (true);
918 if (unlikely (!c
->extend (*this))) return TRACE_RETURN (false);
919 for (unsigned int i
= 0; i
< items_len
- 1; i
++)
921 items
.advance (items_len
- 1);
922 return TRACE_RETURN (true);
925 inline bool sanitize_shallow (hb_sanitize_context_t
*c
) {
926 return c
->check_struct (this)
927 && c
->check_array (this, Type::static_size
, len
);
930 inline bool sanitize (hb_sanitize_context_t
*c
) {
931 TRACE_SANITIZE (this);
932 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
934 /* Note: for structs that do not reference other structs,
935 * we do not need to call their sanitize() as we already did
936 * a bound check on the aggregate array size. We just include
937 * a small unreachable expression to make sure the structs
938 * pointed to do have a simple sanitize(), ie. they do not
939 * reference other structs via offsets.
941 (void) (false && array
[0].sanitize (c
));
943 return TRACE_RETURN (true);
949 DEFINE_SIZE_ARRAY (sizeof (USHORT
), array
);
953 /* An array with sorted elements. Supports binary searching. */
954 template <typename Type
>
955 struct SortedArrayOf
: ArrayOf
<Type
> {
957 template <typename SearchType
>
958 inline int search (const SearchType
&x
) const
960 /* Hand-coded bsearch here since this is in the hot inner loop. */
961 int min
= 0, max
= (int) this->len
- 1;
964 int mid
= (min
+ max
) / 2;
965 int c
= this->array
[mid
].cmp (x
);
981 #endif /* HB_OPEN_TYPE_PRIVATE_HH */