2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2012 Google, Inc.
5 * This is part of HarfBuzz, a text shaping library.
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
30 #define HB_OPEN_TYPE_PRIVATE_HH
32 #include "hb-private.hh"
/* Cast to struct Type, reference to reference.
 * Purely a reinterpretation of the referenced bytes; no conversion is done. */
template<typename Type, typename TObject>
inline const Type& CastR (const TObject &obj)
{
  return reinterpret_cast<const Type&> (obj);
}
template<typename Type, typename TObject>
inline Type& CastR (TObject &obj)
{
  return reinterpret_cast<Type&> (obj);
}
/* Cast to struct Type, pointer to pointer.
 * Reinterprets the pointee type only; the address is unchanged. */
template<typename Type, typename TObject>
inline const Type* CastP (const TObject *obj)
{
  return reinterpret_cast<const Type*> (obj);
}
template<typename Type, typename TObject>
inline Type* CastP (TObject *obj)
{
  return reinterpret_cast<Type*> (obj);
}
/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes. */
template<typename Type>
inline const Type& StructAtOffset (const void *base, unsigned int ofs)
{
  const char *bytes = static_cast<const char *> (base);
  return *reinterpret_cast<const Type *> (bytes + ofs);
}
template<typename Type>
inline Type& StructAtOffset (void *base, unsigned int ofs)
{
  char *bytes = static_cast<char *> (base);
  return *reinterpret_cast<Type *> (bytes + ofs);
}

/* StructAfter<T>(X) returns the struct T& that is placed right after X.
 * Works with variable-sized X too; X must implement get_size(). */
template<typename Type, typename TObject>
inline const Type& StructAfter (const TObject &obj)
{
  return StructAtOffset<Type> (&obj, obj.get_size ());
}
template<typename Type, typename TObject>
inline Type& StructAfter (TObject &obj)
{
  return StructAtOffset<Type> (&obj, obj.get_size ());
}
/* NOTE(review): this whole region is a garbled extraction — physical lines are
 * split mid-statement, the original file's line numbers are fused into the
 * text, and some lines (macro-body braces, an #endif, a template<> line) are
 * missing.  Code tokens below are preserved verbatim; only comments added. */
84 /* Check _assertion in a method environment */
85 #define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
86 inline void _instance_assertion_on_line_##_line (void) const \
88 ASSERT_STATIC (_assertion); \
89 ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
91 # define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
92 # define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)
/* Two-level macro indirection so __LINE__ is expanded before token pasting,
 * giving each assertion method a unique name. */
94 /* Check that _code compiles in a method environment */
95 #define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
96 inline void _compiles_assertion_on_line_##_line (void) const \
98 # define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
99 # define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
/* Size macros: declare static_size/min_size members plus a size assertion. */
102 #define DEFINE_SIZE_STATIC(size) \
103 DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
104 static const unsigned int static_size = (size); \
105 static const unsigned int min_size = (size)
107 /* Size signifying variable-sized array */
/* Union variant: checks one union member's static_size instead of sizeof. */
110 #define DEFINE_SIZE_UNION(size, _member) \
111 DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
112 static const unsigned int min_size = (size)
114 #define DEFINE_SIZE_MIN(size) \
115 DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
116 static const unsigned int min_size = (size)
/* Array variants: the struct's min_size excludes the trailing array. */
118 #define DEFINE_SIZE_ARRAY(size, array) \
119 DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
120 DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
121 static const unsigned int min_size = (size)
123 #define DEFINE_SIZE_ARRAY2(size, array1, array2) \
124 DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
125 DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
126 static const unsigned int min_size = (size)
134 /* Global nul-content Null pool. Enlarge as necessary. */
135 /* TODO This really should be a extern HB_INTERNAL and defined somewhere... */
136 static const void *_NullPool
[64 / sizeof (void *)];
/* 64 bytes of zeros; any Type with min_size <= 64 can alias into it. */
138 /* Generic nul-content Null objects. */
139 template <typename Type
>
140 static inline const Type
& Null (void) {
141 ASSERT_STATIC (Type::min_size
<= sizeof (_NullPool
));
142 return *CastP
<Type
> (_NullPool
);
/* NOTE(review): the closing brace of Null() (orig. line 143) is missing here. */
145 /* Specialization for arbitrary-content arbitrary-sized Null objects. */
/* NOTE(review): the `template <>` line (orig. line 148) is missing below. */
146 #define DEFINE_NULL_DATA(Type, data) \
147 static const char _Null##Type[Type::min_size + 1] = data; /* +1 is for nul-termination in data */ \
149 inline const Type& Null<Type> (void) { \
150 return *CastP<Type> (_Null##Type); \
151 } /* The following line really exists such that we end in a place needing semicolon */ \
152 ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))
154 /* Accessor macro. */
155 #define Null(Type) Null<Type>()
/* Sanitize debug machinery: HB_DEBUG_SANITIZE gates the tracing below.
 * NOTE(review): the matching #endif (orig. line 165) is missing. */
163 #ifndef HB_DEBUG_SANITIZE
164 #define HB_DEBUG_SANITIZE (HB_DEBUG+0)
168 #define TRACE_SANITIZE() \
169 hb_auto_trace_t<HB_DEBUG_SANITIZE> trace (&c->debug_depth, "SANITIZE", this, HB_FUNC, "");
/* hb_sanitize_context_t: bounds-checking context used while validating a font
 * blob.  Tracks the blob's [start, end) byte range, an edit counter, and
 * whether the underlying memory is writable.
 * NOTE(review): garbled extraction — braces and some member lines are missing;
 * code tokens preserved verbatim, comments only added. */
172 struct hb_sanitize_context_t
/* init: take a reference on the blob; memory starts out non-writable. */
174 inline void init (hb_blob_t
*b
)
176 this->blob
= hb_blob_reference (b
);
177 this->writable
= false;
/* start_processing: cache the blob's data pointer and length, reset the
 * edit counter and trace depth, and emit a begin-trace message. */
180 inline void start_processing (void)
182 this->start
= hb_blob_get_data (this->blob
, NULL
);
183 this->end
= this->start
+ hb_blob_get_length (this->blob
);
184 this->edit_count
= 0;
185 this->debug_depth
= 0;
187 DEBUG_MSG_LEVEL (SANITIZE
, this->blob
, 0, +1,
188 "start [%p..%p] (%lu bytes)",
189 this->start
, this->end
,
190 (unsigned long) (this->end
- this->start
));
/* end_processing: emit an end-trace message, drop the blob reference, and
 * clear the cached range pointers. */
193 inline void end_processing (void)
195 DEBUG_MSG_LEVEL (SANITIZE
, this->blob
, 0, -1,
196 "end [%p..%p] %u edit requests",
197 this->start
, this->end
, this->edit_count
);
199 hb_blob_destroy (this->blob
);
201 this->start
= this->end
= NULL
;
/* check_range: true iff [base, base+len) lies entirely inside [start, end).
 * The subtraction form also guards against pointer-arithmetic overflow. */
204 inline bool check_range (const void *base
, unsigned int len
) const
206 const char *p
= (const char *) base
;
208 hb_auto_trace_t
<HB_DEBUG_SANITIZE
> trace (&this->debug_depth
, "SANITIZE", this->blob
, NULL
,
209 "check_range [%p..%p] (%d bytes) in [%p..%p]",
211 this->start
, this->end
);
213 return TRACE_RETURN (likely (this->start
<= p
&& p
<= this->end
&& (unsigned int) (this->end
- p
) >= len
));
/* check_array: like check_range over record_size*len bytes, but first checks
 * the multiplication itself for unsigned overflow. */
216 inline bool check_array (const void *base
, unsigned int record_size
, unsigned int len
) const
218 const char *p
= (const char *) base
;
219 bool overflows
= _hb_unsigned_int_mul_overflows (len
, record_size
);
221 hb_auto_trace_t
<HB_DEBUG_SANITIZE
> trace (&this->debug_depth
, "SANITIZE", this->blob
, NULL
,
222 "check_array [%p..%p] (%d*%d=%ld bytes) in [%p..%p]",
223 p
, p
+ (record_size
* len
), record_size
, len
, (unsigned long) record_size
* len
,
224 this->start
, this->end
);
226 return TRACE_RETURN (likely (!overflows
&& this->check_range (base
, record_size
* len
)));
/* check_struct: range-check obj's fixed-size prefix (Type::min_size bytes). */
229 template <typename Type
>
230 inline bool check_struct (const Type
*obj
) const
232 return likely (this->check_range (obj
, obj
->min_size
));
/* may_edit: whether in-place edits are allowed; counts the request and
 * returns the writable flag.  (base/len are used for tracing only.) */
235 inline bool may_edit (const void *base HB_UNUSED
, unsigned int len HB_UNUSED
)
237 const char *p
= (const char *) base
;
240 hb_auto_trace_t
<HB_DEBUG_SANITIZE
> trace (&this->debug_depth
, "SANITIZE", this->blob
, NULL
,
241 "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
244 this->start
, this->end
);
246 return TRACE_RETURN (this->writable
);
/* Data members (the blob/writable member lines are missing from this view). */
249 mutable unsigned int debug_depth
;
250 const char *start
, *end
;
252 unsigned int edit_count
;
313 /* NOTE(review): the enclosing `struct Sanitizer` declaration (orig. ~line 260)
 * is missing from this view; these are its static methods.  Garbled
 * extraction — tokens preserved verbatim, comments only added. */
258 /* Template to sanitize an object. */
259 template <typename Type
>
/* sanitize: validate `blob` as a Type.  Runs sanitize() twice (second round
 * guards against edits stepping on each other); if edits were requested on a
 * read-only blob, makes it writable and retries.  Consumes the caller's blob
 * reference; on failure returns the empty blob instead. */
262 static hb_blob_t
*sanitize (hb_blob_t
*blob
) {
263 hb_sanitize_context_t c
[1] = {{0}};
266 /* TODO is_sane() stuff */
271 DEBUG_MSG_FUNC (SANITIZE
, blob
, "start");
273 c
->start_processing ();
275 if (unlikely (!c
->start
)) {
276 c
->end_processing ();
/* First sanitize round over the cast blob data. */
280 Type
*t
= CastP
<Type
> (const_cast<char *> (c
->start
));
282 sane
= t
->sanitize (c
);
285 DEBUG_MSG_FUNC (SANITIZE
, blob
, "passed first round with %d edits; going for second round", c
->edit_count
);
287 /* sanitize again to ensure no toe-stepping */
289 sane
= t
->sanitize (c
);
291 DEBUG_MSG_FUNC (SANITIZE
, blob
, "requested %d edits in second round; FAILLING", c
->edit_count
);
/* Edits were requested but the blob is read-only: get a writable copy of
 * the data and refresh the cached range, then retry. */
296 unsigned int edit_count
= c
->edit_count
;
297 if (edit_count
&& !c
->writable
) {
298 c
->start
= hb_blob_get_data_writable (blob
, NULL
);
299 c
->end
= c
->start
+ hb_blob_get_length (blob
);
303 /* ok, we made it writable by relocating. try again */
304 DEBUG_MSG_FUNC (SANITIZE
, blob
, "retry");
310 c
->end_processing ();
312 DEBUG_MSG_FUNC (SANITIZE
, blob
, sane
? "PASSED" : "FAILED");
/* Failure path: release the blob and hand back the shared empty blob. */
316 hb_blob_destroy (blob
);
317 return hb_blob_get_empty ();
/* lock_instance: freeze the blob and return its data cast to Type, or the
 * Null object if the data is inaccessible. */
321 static const Type
* lock_instance (hb_blob_t
*blob
) {
322 hb_blob_make_immutable (blob
);
323 const char *base
= hb_blob_get_data (blob
, NULL
);
324 return unlikely (!base
) ? &Null(Type
) : CastP
<Type
> (base
);
/* Serialize debug machinery (mirrors the SANITIZE macros above).
 * NOTE(review): the matching #endif is missing from this view. */
334 #ifndef HB_DEBUG_SERIALIZE
335 #define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
339 #define TRACE_SERIALIZE() \
340 hb_auto_trace_t<HB_DEBUG_SERIALIZE> trace (&c->debug_depth, "SERIALIZE", c, HB_FUNC, "");
/* hb_serialize_context_t: bump-pointer writer over a caller-provided buffer
 * [start, end); `head` is the current write position, `ran_out_of_room` is
 * sticky once an allocation fails.  Garbled extraction — tokens verbatim. */
343 struct hb_serialize_context_t
/* Constructor: wrap the buffer [start, start+size). */
345 inline hb_serialize_context_t (void *start
, unsigned int size
)
347 this->start
= (char *) start
;
348 this->end
= this->start
+ size
;
350 this->ran_out_of_room
= false;
351 this->head
= this->start
;
352 this->debug_depth
= 0;
/* start_serialize: begin-trace and reserve the top-level Type at head. */
355 template <typename Type
>
356 inline Type
*start_serialize (void)
358 DEBUG_MSG_LEVEL (SERIALIZE
, this->start
, 0, +1,
359 "start [%p..%p] (%lu bytes)",
360 this->start
, this->end
,
361 (unsigned long) (this->end
- this->start
));
363 return start_embed
<Type
> ();
/* end_serialize: end-trace reporting bytes written and overflow status. */
366 inline void end_serialize (void)
368 DEBUG_MSG_LEVEL (SERIALIZE
, this->start
, 0, -1,
369 "end [%p..%p] serialized %d bytes; %s",
370 this->start
, this->end
,
371 (int) (this->head
- this->start
),
372 this->ran_out_of_room
? "RAN OUT OF ROOM" : "did not ran out of room");
/* copy: malloc a copy of everything serialized so far ([start, head)).
 * NOTE(review): no visible NULL check on malloc before the memcpy here —
 * confirm against the complete original. */
376 template <typename Type
>
377 inline Type
*copy (void)
379 assert (!this->ran_out_of_room
);
380 unsigned int len
= this->head
- this->start
;
381 void *p
= malloc (len
);
383 memcpy (p
, this->start
, len
);
384 return reinterpret_cast<Type
*> (p
);
/* allocate_size: claim `size` zeroed bytes at head, advancing it; sets the
 * sticky ran_out_of_room flag (and presumably returns NULL — the early
 * return line is missing from this view) on overflow. */
387 template <typename Type
>
388 inline Type
*allocate_size (unsigned int size
)
390 if (unlikely (this->ran_out_of_room
|| this->end
- this->head
< size
)) {
391 this->ran_out_of_room
= true;
394 memset (this->head
, 0, size
);
395 char *ret
= this->head
;
397 return reinterpret_cast<Type
*> (ret
);
/* allocate_min: allocate just Type's fixed-size prefix. */
400 template <typename Type
>
401 inline Type
*allocate_min (void)
403 return this->allocate_size
<Type
> (Type::min_size
);
/* start_embed: current head as a Type*, without allocating. */
406 template <typename Type
>
407 inline Type
*start_embed (void)
409 Type
*ret
= reinterpret_cast<Type
*> (this->head
);
/* embed: allocate obj.get_size() bytes and copy obj in; NULL on overflow. */
413 template <typename Type
>
414 inline Type
*embed (const Type
&obj
)
416 unsigned int size
= obj
.get_size ();
417 Type
*ret
= this->allocate_size
<Type
> (size
);
418 if (unlikely (!ret
)) return NULL
;
419 memcpy (ret
, obj
, size
);
/* extend_min: grow the buffer so obj's min_size bytes fit before head.
 * obj must already lie within [start, head]. */
423 template <typename Type
>
424 inline Type
*extend_min (Type
&obj
)
426 unsigned int size
= obj
.min_size
;
427 assert (this->start
<= (char *) &obj
&& (char *) &obj
<= this->head
&& (char *) &obj
+ size
>= this->head
);
428 if (unlikely (!this->allocate_size
<Type
> (((char *) &obj
) + size
- this->head
))) return NULL
;
429 return reinterpret_cast<Type
*> (&obj
);
/* extend: like extend_min but uses obj.get_size() (full variable size). */
432 template <typename Type
>
433 inline Type
*extend (Type
&obj
)
435 unsigned int size
= obj
.get_size ();
436 assert (this->start
< (char *) &obj
&& (char *) &obj
<= this->head
&& (char *) &obj
+ size
>= this->head
);
437 if (unlikely (!this->allocate_size
<Type
> (((char *) &obj
) + size
- this->head
))) return NULL
;
438 return reinterpret_cast<Type
*> (&obj
);
/* truncate: roll head back to an earlier position inside (start, head]. */
441 inline void truncate (void *head
)
443 assert (this->start
< head
&& head
<= this->head
);
444 this->head
= (char *) head
;
/* Data members. */
447 unsigned int debug_depth
;
448 char *start
, *end
, *head
;
449 bool ran_out_of_room
;
/* Supplier<Type>: cursor over a caller-owned array feeding serialization.
 * NOTE(review): the `struct Supplier` header and its data members are missing
 * from this view; tokens preserved verbatim, comments only added. */
452 template <typename Type
>
/* Construct from an array pointer and element count. */
455 inline Supplier (const Type
*array
, unsigned int len_
)
/* operator[]: returns element i by value, or a default Type() past the end. */
460 inline const Type
operator [] (unsigned int i
) const
462 if (unlikely (i
>= len
)) return Type ();
/* advance: consume `count` elements (clamped when count exceeds len). */
466 inline void advance (unsigned int count
)
468 if (unlikely (count
> len
))
/* Copying a Supplier is intentionally disallowed (declared, not defined). */
475 inline Supplier (const Supplier
<Type
> &); /* Disallow copy */
476 inline Supplier
<Type
>& operator= (const Supplier
<Type
> &); /* Disallow copy */
/* NOTE(review): banner comment below lost its opening `/*` in extraction. */
487 * The OpenType Font File: Data Types
491 /* "The following data types are used in the OpenType font file.
492 * All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
/* BEInt<Type, Bytes>: big-endian storage for Type in exactly Bytes bytes.
 * Only the 2- and 4-byte specializations below are defined. */
499 template <typename Type
, int Bytes
> struct BEInt
;
/* 2-byte specialization: byte-swapped via the hb_be_uint16_* helpers.
 * NOTE(review): braces of the struct body are missing from this view. */
501 template <typename Type
>
502 struct BEInt
<Type
, 2>
505 inline void set (Type i
) { hb_be_uint16_put (v
,i
); }
506 inline operator Type (void) const { return hb_be_uint16_get (v
); }
507 inline bool operator == (const BEInt
<Type
, 2>& o
) const { return hb_be_uint16_eq (v
, o
.v
); }
508 inline bool operator != (const BEInt
<Type
, 2>& o
) const { return !(*this == o
); }
509 private: uint8_t v
[2];
/* 4-byte specialization: same shape, via the hb_be_uint32_* helpers. */
511 template <typename Type
>
512 struct BEInt
<Type
, 4>
515 inline void set (Type i
) { hb_be_uint32_put (v
,i
); }
516 inline operator Type (void) const { return hb_be_uint32_get (v
); }
517 inline bool operator == (const BEInt
<Type
, 4>& o
) const { return hb_be_uint32_eq (v
, o
.v
); }
518 inline bool operator != (const BEInt
<Type
, 4>& o
) const { return !(*this == o
); }
519 private: uint8_t v
[4];
522 /* Integer types in big-endian order and no alignment requirement */
/* IntType<Type>: wraps BEInt<Type, sizeof(Type)> with set/get, comparison and
 * sanitize support.  NOTE(review): the `struct IntType` header line is
 * missing from this view; tokens preserved verbatim. */
523 template <typename Type
>
526 inline void set (Type i
) { v
.set (i
); }
527 inline operator Type(void) const { return v
; }
/* Equality compares the raw big-endian bytes — no byte swap needed. */
528 inline bool operator == (const IntType
<Type
> &o
) const { return v
== o
.v
; }
529 inline bool operator != (const IntType
<Type
> &o
) const { return v
!= o
.v
; }
/* qsort/bsearch-style comparator; note the argument swap: sorts via b->cmp(*a). */
530 static inline int cmp (const IntType
<Type
> *a
, const IntType
<Type
> *b
) { return b
->cmp (*a
); }
/* Three-way compare in native (decoded) value space: -1 / 0 / +1. */
531 inline int cmp (IntType
<Type
> va
) const { Type a
= va
; Type b
= v
; return a
< b
? -1 : a
== b
? 0 : +1; }
532 inline int cmp (Type a
) const { Type b
= v
; return a
< b
? -1 : a
== b
? 0 : +1; }
/* sanitize: just a bounds check on this fixed-size value. */
533 inline bool sanitize (hb_sanitize_context_t
*c
) {
535 return TRACE_RETURN (likely (c
->check_struct (this)));
538 BEInt
<Type
, sizeof (Type
)> v
;
540 DEFINE_SIZE_STATIC (sizeof (Type
));
/* Concrete OpenType scalar typedefs. */
543 typedef IntType
<uint16_t> USHORT
; /* 16-bit unsigned integer. */
544 typedef IntType
<int16_t> SHORT
; /* 16-bit signed integer. */
545 typedef IntType
<uint32_t> ULONG
; /* 32-bit unsigned integer. */
546 typedef IntType
<int32_t> LONG
; /* 32-bit signed integer. */
548 /* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
/* NOTE(review): the FWORD typedef itself (orig. line 549) is missing here. */
551 /* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
552 typedef USHORT UFWORD
;
/* LONGDATETIME (per comment below): 8-byte fixed-size struct; only its
 * sanitize() and size assertion survive in this view — the struct header and
 * member declarations are missing. */
554 /* Date represented in number of seconds since 12:00 midnight, January 1,
555 * 1904. The value is represented as a signed 64-bit integer. */
558 inline bool sanitize (hb_sanitize_context_t
*c
) {
560 return TRACE_RETURN (likely (c
->check_struct (this)));
566 DEFINE_SIZE_STATIC (8);
/* Tag: 4-byte identifier.  NOTE(review): the `struct Tag` header is missing
 * from this view. */
569 /* Array of four uint8s (length = 32 bits) used to identify a script, language
570 * system, feature, or baseline */
573 /* What the char* converters return is NOT nul-terminated. Print using "%.4s" */
574 inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v
); }
575 inline operator char* (void) { return reinterpret_cast<char *> (&this->v
); }
577 DEFINE_SIZE_STATIC (4);
/* Null Tag is four spaces (plus the macro's implicit nul terminator). */
579 DEFINE_NULL_DATA (Tag
, " ");
581 /* Glyph index number, same as uint16 (length = 16 bits) */
582 typedef USHORT GlyphID
;
584 /* Script/language-system/feature index */
585 struct Index
: USHORT
{
586 static const unsigned int NOT_FOUND_INDEX
= 0xFFFF;
/* Null Index is 0xFFFF, i.e. NOT_FOUND_INDEX.  NOTE(review): the closing
 * `};` of struct Index is missing from this view. */
588 DEFINE_NULL_DATA (Index
, "\xff\xff");
590 /* Offset to a table, same as uint16 (length = 16 bits), Null offset = 0x0000 */
591 typedef USHORT Offset
;
593 /* LongOffset to a table, same as uint32 (length = 32 bits), Null offset = 0x00000000 */
594 typedef ULONG LongOffset
;
/* CheckSum: table checksum as a ULONG; sums the table in 32-bit words,
 * rounding the byte length up to a multiple of 4.  NOTE(review): the
 * accumulator and loop-body lines are missing from this view. */
598 struct CheckSum
: ULONG
600 static uint32_t CalcTableChecksum (ULONG
*Table
, uint32_t Length
)
603 ULONG
*EndPtr
= Table
+((Length
+3) & ~3) / ULONG::static_size
;
605 while (Table
< EndPtr
)
610 DEFINE_SIZE_STATIC (4);
/* FixedVersion (major.minor as two 16-bit fields): header and member lines
 * missing from this view; to_int() packs major into the high 16 bits. */
620 inline uint32_t to_int (void) const { return (major
<< 16) + minor
; }
622 inline bool sanitize (hb_sanitize_context_t
*c
) {
624 return TRACE_RETURN (c
->check_struct (this));
630 DEFINE_SIZE_STATIC (4);
/* NOTE(review): banner comment line below lost its opening `/*`. */
636 * Template subclasses of Offset and LongOffset that do the dereferencing.
/* GenericOffsetTo<OffsetType, Type>: an OffsetType whose value is a byte
 * offset from some `base`; operator() dereferences it to a Type&.
 * Garbled extraction — tokens preserved verbatim, comments only added. */
640 template <typename OffsetType
, typename Type
>
641 struct GenericOffsetTo
: OffsetType
/* Dereference relative to base; offset 0 means the Null object. */
643 inline const Type
& operator () (const void *base
) const
645 unsigned int offset
= *this;
646 if (unlikely (!offset
)) return Null(Type
);
647 return StructAtOffset
<Type
> (base
, offset
);
/* Mutable dereference; note: no Null short-circuit in this overload. */
649 inline Type
& operator () (void *base
)
651 unsigned int offset
= *this;
652 return StructAtOffset
<Type
> (base
, offset
);
/* serialize: point this offset at the serializer's current head,
 * relative to base. */
655 inline Type
& serialize (hb_serialize_context_t
*c
, void *base
)
657 Type
*t
= c
->start_embed
<Type
> ();
658 this->set ((char *) t
- (char *) base
); /* TODO(serialize) Overflow? */
/* sanitize: check the offset field itself, then recurse into the target;
 * a failing target is neutered (offset zeroed) rather than failing hard. */
662 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
) {
664 if (unlikely (!c
->check_struct (this))) return TRACE_RETURN (false);
665 unsigned int offset
= *this;
666 if (unlikely (!offset
)) return TRACE_RETURN (true);
667 Type
&obj
= StructAtOffset
<Type
> (base
, offset
);
668 return TRACE_RETURN (likely (obj
.sanitize (c
)) || neuter (c
));
/* Same, forwarding extra user_data into the target's sanitize(). */
670 template <typename T
>
671 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
, T user_data
) {
673 if (unlikely (!c
->check_struct (this))) return TRACE_RETURN (false);
674 unsigned int offset
= *this;
675 if (unlikely (!offset
)) return TRACE_RETURN (true);
676 Type
&obj
= StructAtOffset
<Type
> (base
, offset
);
677 return TRACE_RETURN (likely (obj
.sanitize (c
, user_data
)) || neuter (c
));
681 /* Set the offset to Null */
682 inline bool neuter (hb_sanitize_context_t
*c
) {
683 if (c
->may_edit (this, this->static_size
)) {
684 this->set (0); /* 0 is Null offset */
690 template <typename Base
, typename OffsetType
, typename Type
>
691 inline const Type
& operator + (const Base
&base
, const GenericOffsetTo
<OffsetType
, Type
> &offset
) { return offset (base
); }
692 template <typename Base
, typename OffsetType
, typename Type
>
693 inline Type
& operator + (Base
&base
, GenericOffsetTo
<OffsetType
, Type
> &offset
) { return offset (base
); }
695 template <typename Type
>
696 struct OffsetTo
: GenericOffsetTo
<Offset
, Type
> {};
698 template <typename Type
>
699 struct LongOffsetTo
: GenericOffsetTo
<LongOffset
, Type
> {};
/* GenericArrayOf<LenType, Type>: a LenType element count followed inline by
 * that many Type records.  Garbled extraction — braces and a few lines are
 * missing; tokens preserved verbatim, comments only added. */
706 template <typename LenType
, typename Type
>
707 struct GenericArrayOf
/* sub_array: window [start_offset, start_offset+*pcount) clamped to the
 * array; *pcount is updated with the clamped count. */
709 const Type
*sub_array (unsigned int start_offset
, unsigned int *pcount
/* IN/OUT */) const
711 unsigned int count
= len
;
712 if (unlikely (start_offset
> count
))
715 count
-= start_offset
;
716 count
= MIN (count
, *pcount
);
718 return array
+ start_offset
;
/* Bounds-checked element access; out of range yields the Null object. */
721 inline const Type
& operator [] (unsigned int i
) const
723 if (unlikely (i
>= len
)) return Null(Type
);
726 inline Type
& operator [] (unsigned int i
)
/* Total serialized size: the length field plus len fixed-size records. */
730 inline unsigned int get_size (void) const
731 { return len
.static_size
+ len
* Type::static_size
; }
/* serialize (count only): write the length field and reserve room for the
 * elements. */
733 inline bool serialize (hb_serialize_context_t
*c
,
734 unsigned int items_len
)
737 if (unlikely (!c
->extend_min (*this))) return TRACE_RETURN (false);
738 len
.set (items_len
); /* TODO(serialize) Overflow? */
739 if (unlikely (!c
->extend (*this))) return TRACE_RETURN (false);
740 return TRACE_RETURN (true);
/* serialize (with items): as above, then copy items_len elements from the
 * Supplier, consuming them. */
743 inline bool serialize (hb_serialize_context_t
*c
,
744 Supplier
<Type
> &items
,
745 unsigned int items_len
)
748 if (unlikely (!serialize (c
, items_len
))) return TRACE_RETURN (false);
749 for (unsigned int i
= 0; i
< items_len
; i
++)
751 items
.advance (items_len
);
752 return TRACE_RETURN (true);
/* sanitize: shallow bound check only — see note below for why elements are
 * not sanitized individually. */
755 inline bool sanitize (hb_sanitize_context_t
*c
) {
757 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
759 /* Note: for structs that do not reference other structs,
760 * we do not need to call their sanitize() as we already did
761 * a bound check on the aggregate array size. We just include
762 * a small unreachable expression to make sure the structs
763 * pointed to do have a simple sanitize(), ie. they do not
764 * reference other structs via offsets.
766 (void) (false && array
[0].sanitize (c
));
768 return TRACE_RETURN (true);
/* sanitize with base: elements contain offsets, so recurse per element. */
770 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
) {
772 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
773 unsigned int count
= len
;
774 for (unsigned int i
= 0; i
< count
; i
++)
775 if (unlikely (!array
[i
].sanitize (c
, base
)))
776 return TRACE_RETURN (false);
777 return TRACE_RETURN (true);
/* Same, forwarding user_data to each element's sanitize(). */
779 template <typename T
>
780 inline bool sanitize (hb_sanitize_context_t
*c
, void *base
, T user_data
) {
782 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
783 unsigned int count
= len
;
784 for (unsigned int i
= 0; i
< count
; i
++)
785 if (unlikely (!array
[i
].sanitize (c
, base
, user_data
)))
786 return TRACE_RETURN (false);
787 return TRACE_RETURN (true);
/* sanitize_shallow: header plus len*static_size bytes are in range. */
791 inline bool sanitize_shallow (hb_sanitize_context_t
*c
) {
793 return TRACE_RETURN (c
->check_struct (this) && c
->check_array (this, Type::static_size
, len
));
800 DEFINE_SIZE_ARRAY (sizeof (LenType
), array
);
803 /* An array with a USHORT number of elements. */
804 template <typename Type
>
805 struct ArrayOf
: GenericArrayOf
<USHORT
, Type
> {};
807 /* An array with a ULONG number of elements. */
808 template <typename Type
>
809 struct LongArrayOf
: GenericArrayOf
<ULONG
, Type
> {};
811 /* Array of Offset's */
812 template <typename Type
>
813 struct OffsetArrayOf
: ArrayOf
<OffsetTo
<Type
> > {};
815 /* Array of LongOffset's */
816 template <typename Type
>
817 struct LongOffsetArrayOf
: ArrayOf
<LongOffsetTo
<Type
> > {};
819 /* LongArray of LongOffset's */
820 template <typename Type
>
821 struct LongOffsetLongArrayOf
: LongArrayOf
<LongOffsetTo
<Type
> > {};
823 /* Array of offsets relative to the beginning of the array itself. */
/* OffsetListOf<Type>: like OffsetArrayOf, but each offset is resolved
 * relative to `this` (the array itself), hence the `this+...` below.
 * Garbled extraction — tokens preserved verbatim, comments only added. */
824 template <typename Type
>
825 struct OffsetListOf
: OffsetArrayOf
<Type
>
/* operator[]: dereference the i'th offset against this; out of range
 * yields the Null object. */
827 inline const Type
& operator [] (unsigned int i
) const
829 if (unlikely (i
>= this->len
)) return Null(Type
);
830 return this+this->array
[i
];
/* sanitize: delegate to the base, passing this as the offsets' base. */
833 inline bool sanitize (hb_sanitize_context_t
*c
) {
835 return TRACE_RETURN (OffsetArrayOf
<Type
>::sanitize (c
, this));
837 template <typename T
>
838 inline bool sanitize (hb_sanitize_context_t
*c
, T user_data
) {
840 return TRACE_RETURN (OffsetArrayOf
<Type
>::sanitize (c
, this, user_data
));
845 /* An array with a USHORT number of elements,
846 * starting at second element. */
/* HeadlessArrayOf<Type>: `len` counts a notional first element that is stored
 * elsewhere, so only len-1 elements live here and index 0 is invalid.
 * Garbled extraction — tokens preserved verbatim, comments only added. */
847 template <typename Type
>
848 struct HeadlessArrayOf
/* operator[]: index 0 and out-of-range both yield the Null object. */
850 inline const Type
& operator [] (unsigned int i
) const
852 if (unlikely (i
>= len
|| !i
)) return Null(Type
);
/* Size counts len-1 stored records (the "head" element lives elsewhere). */
855 inline unsigned int get_size (void) const
856 { return len
.static_size
+ (len
? len
- 1 : 0) * Type::static_size
; }
/* serialize: write len, then copy items_len-1 elements from the Supplier
 * (the first logical element is not stored here). */
858 inline bool serialize (hb_serialize_context_t
*c
,
859 Supplier
<Type
> &items
,
860 unsigned int items_len
)
863 if (unlikely (!c
->extend_min (*this))) return TRACE_RETURN (false);
864 len
.set (items_len
); /* TODO(serialize) Overflow? */
865 if (unlikely (!items_len
)) return TRACE_RETURN (true);
866 if (unlikely (!c
->extend (*this))) return TRACE_RETURN (false);
867 for (unsigned int i
= 0; i
< items_len
- 1; i
++)
869 items
.advance (items_len
- 1);
870 return TRACE_RETURN (true);
/* sanitize_shallow: header plus the aggregate record bytes are in range.
 * NOTE(review): checks `len` records, not len-1 — confirm against the
 * complete original whether this over-check is intentional. */
873 inline bool sanitize_shallow (hb_sanitize_context_t
*c
) {
874 return c
->check_struct (this)
875 && c
->check_array (this, Type::static_size
, len
);
/* sanitize: shallow check only; see the note below about simple elements. */
878 inline bool sanitize (hb_sanitize_context_t
*c
) {
880 if (unlikely (!sanitize_shallow (c
))) return TRACE_RETURN (false);
882 /* Note: for structs that do not reference other structs,
883 * we do not need to call their sanitize() as we already did
884 * a bound check on the aggregate array size. We just include
885 * a small unreachable expression to make sure the structs
886 * pointed to do have a simple sanitize(), ie. they do not
887 * reference other structs via offsets.
889 (void) (false && array
[0].sanitize (c
));
891 return TRACE_RETURN (true);
897 DEFINE_SIZE_ARRAY (sizeof (USHORT
), array
);
901 /* An array with sorted elements. Supports binary searching. */
/* SortedArrayOf<Type>: ArrayOf whose elements are kept sorted so search()
 * can use bsearch.  Garbled extraction — the `struct Cmp` wrapper header,
 * return statements and closing braces are missing from this view; tokens
 * preserved verbatim, comments only added. */
902 template <typename Type
>
903 struct SortedArrayOf
: ArrayOf
<Type
> {
/* search: index of the element comparing equal to x, or -1 if absent. */
905 template <typename SearchType
>
906 inline int search (const SearchType
&x
) const {
907 unsigned int count
= this->len
;
908 /* Linear search is *much* faster for small counts. */
909 if (likely (count
< 32)) {
910 for (unsigned int i
= 0; i
< count
; i
++)
911 if (this->array
[i
].cmp (x
) == 0)
/* bsearch comparator: note the argument swap (element->cmp(key)). */
916 static int cmp (const SearchType
*a
, const Type
*b
) { return b
->cmp (*a
); }
/* Large arrays: binary search via libc bsearch over the raw records. */
918 const Type
*p
= (const Type
*) bsearch (&x
, this->array
, this->len
, sizeof (this->array
[0]), (hb_compare_func_t
) Cmp::cmp
);
919 return p
? p
- this->array
: -1;
928 #endif /* HB_OPEN_TYPE_PRIVATE_HH */