comparison mupdf-source/thirdparty/harfbuzz/src/hb-aat-layout-common.hh @ 2:b50eed0cc0ef upstream

ADD: MuPDF v1.26.7: the MuPDF source as downloaded by a default build of PyMuPDF 1.26.4. The directory name has changed: the expanded directory no longer carries a version number.
author Franz Glasner <fzglas.hg@dom66.de>
date Mon, 15 Sep 2025 11:43:07 +0200
comparing 1:1d09e1dec1d9 with 2:b50eed0cc0ef
1 /*
2 * Copyright © 2017 Google, Inc.
3 *
4 * This is part of HarfBuzz, a text shaping library.
5 *
6 * Permission is hereby granted, without written agreement and without
7 * license or royalty fees, to use, copy, modify, and distribute this
8 * software and its documentation for any purpose, provided that the
9 * above copyright notice and the following two paragraphs appear in
10 * all copies of this software.
11 *
12 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
13 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
14 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
15 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
16 * DAMAGE.
17 *
18 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
19 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
20 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
21 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
22 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
23 *
24 * Google Author(s): Behdad Esfahbod
25 */
26
27 #ifndef HB_AAT_LAYOUT_COMMON_HH
28 #define HB_AAT_LAYOUT_COMMON_HH
29
30 #include "hb-aat-layout.hh"
31 #include "hb-open-type.hh"
32
33 namespace OT {
34 struct GDEF;
35 };
36
37 namespace AAT {
38
39 using namespace OT;
40
41
42 /*
43 * Lookup Table
44 */
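/* Formats implemented below: 0 = simple array indexed by glyph id,
 * 2 = segment single (one value per glyph range), 4 = segment array
 * (per-glyph values per range), 6 = single table (sorted glyph/value
 * pairs), 8 = trimmed array, 10 = trimmed array with a configurable
 * per-value byte size. */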
45
46 template <typename T> struct Lookup;
47
48 template <typename T>
49 struct LookupFormat0
50 {
51 friend struct Lookup<T>;
52
53 private:
54 const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
55 {
56 if (unlikely (glyph_id >= num_glyphs)) return nullptr;
57 return &arrayZ[glyph_id];
58 }
59
60 bool sanitize (hb_sanitize_context_t *c) const
61 {
62 TRACE_SANITIZE (this);
63 return_trace (arrayZ.sanitize (c, c->get_num_glyphs ()));
64 }
65 bool sanitize (hb_sanitize_context_t *c, const void *base) const
66 {
67 TRACE_SANITIZE (this);
68 return_trace (arrayZ.sanitize (c, c->get_num_glyphs (), base));
69 }
70
71 protected:
72 HBUINT16 format; /* Format identifier--format = 0 */
73 UnsizedArrayOf<T>
74 arrayZ; /* Array of lookup values, indexed by glyph index. */
75 public:
76 DEFINE_SIZE_UNBOUNDED (2);
77 };
78
79
80 template <typename T>
81 struct LookupSegmentSingle
82 {
83 static constexpr unsigned TerminationWordCount = 2u;
84
85 int cmp (hb_codepoint_t g) const
86 { return g < first ? -1 : g <= last ? 0 : +1 ; }
87
88 bool sanitize (hb_sanitize_context_t *c) const
89 {
90 TRACE_SANITIZE (this);
91 return_trace (c->check_struct (this) && value.sanitize (c));
92 }
93 bool sanitize (hb_sanitize_context_t *c, const void *base) const
94 {
95 TRACE_SANITIZE (this);
96 return_trace (c->check_struct (this) && value.sanitize (c, base));
97 }
98
99 HBGlyphID16 last; /* Last GlyphID in this segment */
100 HBGlyphID16 first; /* First GlyphID in this segment */
101 T value; /* The lookup value (only one) */
102 public:
103 DEFINE_SIZE_STATIC (4 + T::static_size);
104 };
105
106 template <typename T>
107 struct LookupFormat2
108 {
109 friend struct Lookup<T>;
110
111 private:
112 const T* get_value (hb_codepoint_t glyph_id) const
113 {
114 const LookupSegmentSingle<T> *v = segments.bsearch (glyph_id);
115 return v ? &v->value : nullptr;
116 }
117
118 bool sanitize (hb_sanitize_context_t *c) const
119 {
120 TRACE_SANITIZE (this);
121 return_trace (segments.sanitize (c));
122 }
123 bool sanitize (hb_sanitize_context_t *c, const void *base) const
124 {
125 TRACE_SANITIZE (this);
126 return_trace (segments.sanitize (c, base));
127 }
128
129 protected:
130 HBUINT16 format; /* Format identifier--format = 2 */
131 VarSizedBinSearchArrayOf<LookupSegmentSingle<T>>
132 segments; /* The actual segments. These must already be sorted,
133 * according to the first word in each one (the last
134 * glyph in each segment). */
135 public:
136 DEFINE_SIZE_ARRAY (8, segments);
137 };
138
139 template <typename T>
140 struct LookupSegmentArray
141 {
142 static constexpr unsigned TerminationWordCount = 2u;
143
144 const T* get_value (hb_codepoint_t glyph_id, const void *base) const
145 {
146 return first <= glyph_id && glyph_id <= last ? &(base+valuesZ)[glyph_id - first] : nullptr;
147 }
148
149 int cmp (hb_codepoint_t g) const
150 { return g < first ? -1 : g <= last ? 0 : +1; }
151
152 bool sanitize (hb_sanitize_context_t *c, const void *base) const
153 {
154 TRACE_SANITIZE (this);
155 return_trace (c->check_struct (this) &&
156 first <= last &&
157 valuesZ.sanitize (c, base, last - first + 1));
158 }
159 template <typename ...Ts>
160 bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
161 {
162 TRACE_SANITIZE (this);
163 return_trace (c->check_struct (this) &&
164 first <= last &&
165 valuesZ.sanitize (c, base, last - first + 1, std::forward<Ts> (ds)...));
166 }
167
168 HBGlyphID16 last; /* Last GlyphID in this segment */
169 HBGlyphID16 first; /* First GlyphID in this segment */
170 NNOffset16To<UnsizedArrayOf<T>>
171 valuesZ; /* A 16-bit offset from the start of
172 * the table to the data. */
173 public:
174 DEFINE_SIZE_STATIC (6);
175 };
176
177 template <typename T>
178 struct LookupFormat4
179 {
180 friend struct Lookup<T>;
181
182 private:
183 const T* get_value (hb_codepoint_t glyph_id) const
184 {
185 const LookupSegmentArray<T> *v = segments.bsearch (glyph_id);
186 return v ? v->get_value (glyph_id, this) : nullptr;
187 }
188
189 bool sanitize (hb_sanitize_context_t *c) const
190 {
191 TRACE_SANITIZE (this);
192 return_trace (segments.sanitize (c, this));
193 }
194 bool sanitize (hb_sanitize_context_t *c, const void *base) const
195 {
196 TRACE_SANITIZE (this);
197 return_trace (segments.sanitize (c, this, base));
198 }
199
200 protected:
201 HBUINT16 format; /* Format identifier--format = 4 */
202 VarSizedBinSearchArrayOf<LookupSegmentArray<T>>
203 segments; /* The actual segments. These must already be sorted,
204 * according to the first word in each one (the last
205 * glyph in each segment). */
206 public:
207 DEFINE_SIZE_ARRAY (8, segments);
208 };
209
210 template <typename T>
211 struct LookupSingle
212 {
213 static constexpr unsigned TerminationWordCount = 1u;
214
215 int cmp (hb_codepoint_t g) const { return glyph.cmp (g); }
216
217 bool sanitize (hb_sanitize_context_t *c) const
218 {
219 TRACE_SANITIZE (this);
220 return_trace (c->check_struct (this) && value.sanitize (c));
221 }
222 bool sanitize (hb_sanitize_context_t *c, const void *base) const
223 {
224 TRACE_SANITIZE (this);
225 return_trace (c->check_struct (this) && value.sanitize (c, base));
226 }
227
228 HBGlyphID16 glyph; /* GlyphID for this single-glyph entry (bsearch key) */
229 T value; /* The lookup value (only one) */
230 public:
231 DEFINE_SIZE_STATIC (2 + T::static_size);
232 };
233
234 template <typename T>
235 struct LookupFormat6
236 {
237 friend struct Lookup<T>;
238
239 private:
240 const T* get_value (hb_codepoint_t glyph_id) const
241 {
242 const LookupSingle<T> *v = entries.bsearch (glyph_id);
243 return v ? &v->value : nullptr;
244 }
245
246 bool sanitize (hb_sanitize_context_t *c) const
247 {
248 TRACE_SANITIZE (this);
249 return_trace (entries.sanitize (c));
250 }
251 bool sanitize (hb_sanitize_context_t *c, const void *base) const
252 {
253 TRACE_SANITIZE (this);
254 return_trace (entries.sanitize (c, base));
255 }
256
257 protected:
258 HBUINT16 format; /* Format identifier--format = 6 */
259 VarSizedBinSearchArrayOf<LookupSingle<T>>
260 entries; /* The actual entries, sorted by glyph index. */
261 public:
262 DEFINE_SIZE_ARRAY (8, entries);
263 };
264
265 template <typename T>
266 struct LookupFormat8
267 {
268 friend struct Lookup<T>;
269
270 private:
271 const T* get_value (hb_codepoint_t glyph_id) const
272 {
273 return firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount ?
274 &valueArrayZ[glyph_id - firstGlyph] : nullptr;
275 }
276
277 bool sanitize (hb_sanitize_context_t *c) const
278 {
279 TRACE_SANITIZE (this);
280 return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount));
281 }
282 bool sanitize (hb_sanitize_context_t *c, const void *base) const
283 {
284 TRACE_SANITIZE (this);
285 return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount, base));
286 }
287
288 protected:
289 HBUINT16 format; /* Format identifier--format = 8 */
290 HBGlyphID16 firstGlyph; /* First glyph index included in the trimmed array. */
291 HBUINT16 glyphCount; /* Total number of glyphs (equivalent to the last
292 * glyph minus the value of firstGlyph plus 1). */
293 UnsizedArrayOf<T>
294 valueArrayZ; /* The lookup values (indexed by the glyph index
295 * minus the value of firstGlyph). */
296 public:
297 DEFINE_SIZE_ARRAY (6, valueArrayZ);
298 };
299
300 template <typename T>
301 struct LookupFormat10
302 {
303 friend struct Lookup<T>;
304
305 private:
306 const typename T::type get_value_or_null (hb_codepoint_t glyph_id) const
307 {
308 if (!(firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount))
309 return Null (T);
310
311 const HBUINT8 *p = &valueArrayZ[(glyph_id - firstGlyph) * valueSize];
312
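/* Assemble the value big-endian, one byte at a time; e.g. with valueSize = 2
 * and bytes { 0x01, 0x02 } this yields 0x0102 (example values are
 * illustrative only). */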
313 unsigned int v = 0;
314 unsigned int count = valueSize;
315 for (unsigned int i = 0; i < count; i++)
316 v = (v << 8) | *p++;
317
318 return v;
319 }
320
321 bool sanitize (hb_sanitize_context_t *c) const
322 {
323 TRACE_SANITIZE (this);
324 return_trace (c->check_struct (this) &&
325 valueSize <= 4 &&
326 valueArrayZ.sanitize (c, glyphCount * valueSize));
327 }
328
329 protected:
330 HBUINT16 format; /* Format identifier--format = 10 */
331 HBUINT16 valueSize; /* Byte size of each value. */
332 HBGlyphID16 firstGlyph; /* First glyph index included in the trimmed array. */
333 HBUINT16 glyphCount; /* Total number of glyphs (equivalent to the last
334 * glyph minus the value of firstGlyph plus 1). */
335 UnsizedArrayOf<HBUINT8>
336 valueArrayZ; /* The lookup values (indexed by the glyph index
337 * minus the value of firstGlyph). */
338 public:
339 DEFINE_SIZE_ARRAY (8, valueArrayZ);
340 };
341
342 template <typename T>
343 struct Lookup
344 {
345 const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
346 {
347 switch (u.format) {
348 case 0: return u.format0.get_value (glyph_id, num_glyphs);
349 case 2: return u.format2.get_value (glyph_id);
350 case 4: return u.format4.get_value (glyph_id);
351 case 6: return u.format6.get_value (glyph_id);
352 case 8: return u.format8.get_value (glyph_id);
353 default:return nullptr;
354 }
355 }
356
357 const typename T::type get_value_or_null (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
358 {
359 switch (u.format) {
360 /* Format 10 cannot return a pointer. */
361 case 10: return u.format10.get_value_or_null (glyph_id);
362 default:
363 const T *v = get_value (glyph_id, num_glyphs);
364 return v ? *v : Null (T);
365 }
366 }
367
368 typename T::type get_class (hb_codepoint_t glyph_id,
369 unsigned int num_glyphs,
370 unsigned int outOfRange) const
371 {
372 const T *v = get_value (glyph_id, num_glyphs);
373 return v ? *v : outOfRange;
374 }
375
376 bool sanitize (hb_sanitize_context_t *c) const
377 {
378 TRACE_SANITIZE (this);
379 if (!u.format.sanitize (c)) return_trace (false);
380 switch (u.format) {
381 case 0: return_trace (u.format0.sanitize (c));
382 case 2: return_trace (u.format2.sanitize (c));
383 case 4: return_trace (u.format4.sanitize (c));
384 case 6: return_trace (u.format6.sanitize (c));
385 case 8: return_trace (u.format8.sanitize (c));
386 case 10: return_trace (u.format10.sanitize (c));
387 default:return_trace (true);
388 }
389 }
390 bool sanitize (hb_sanitize_context_t *c, const void *base) const
391 {
392 TRACE_SANITIZE (this);
393 if (!u.format.sanitize (c)) return_trace (false);
394 switch (u.format) {
395 case 0: return_trace (u.format0.sanitize (c, base));
396 case 2: return_trace (u.format2.sanitize (c, base));
397 case 4: return_trace (u.format4.sanitize (c, base));
398 case 6: return_trace (u.format6.sanitize (c, base));
399 case 8: return_trace (u.format8.sanitize (c, base));
400 case 10: return_trace (false); /* We don't support format10 here currently. */
401 default:return_trace (true);
402 }
403 }
404
405 protected:
406 union {
407 HBUINT16 format; /* Format identifier */
408 LookupFormat0<T> format0;
409 LookupFormat2<T> format2;
410 LookupFormat4<T> format4;
411 LookupFormat6<T> format6;
412 LookupFormat8<T> format8;
413 LookupFormat10<T> format10;
414 } u;
415 public:
416 DEFINE_SIZE_UNION (2, format);
417 };
418 DECLARE_NULL_NAMESPACE_BYTES_TEMPLATE1 (AAT, Lookup, 2);
419
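/* Glyph id 0xFFFF marks a glyph deleted during AAT processing; the state
 * machines below map it to CLASS_DELETED_GLYPH. */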
420 enum { DELETED_GLYPH = 0xFFFF };
421
422 /*
423 * (Extended) State Table
424 */
425
426 template <typename T>
427 struct Entry
428 {
429 bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
430 {
431 TRACE_SANITIZE (this);
432 /* Note, we don't recurse-sanitize data because we don't access it.
433 * That said, in our DEFINE_SIZE_STATIC we access T::static_size,
434 * which ensures that data has a simple sanitize(). To be determined
435 * if I need to remove that as well.
436 *
437 * HOWEVER! Because we are a template, our DEFINE_SIZE_STATIC
438 * assertion wouldn't be checked, hence the line below. */
439 static_assert (T::static_size, "");
440
441 return_trace (c->check_struct (this));
442 }
443
444 public:
445 HBUINT16 newState; /* New state: in the obsolete 'kern'/'mort' tables this
446 * is a byte offset from the beginning of the state
447 * table; in 'kerx'/'morx' it is a state number. */
448 HBUINT16 flags; /* Table specific. */
449 T data; /* Optional offsets to per-glyph tables. */
450 public:
451 DEFINE_SIZE_STATIC (4 + T::static_size);
452 };
453
454 template <>
455 struct Entry<void>
456 {
457 bool sanitize (hb_sanitize_context_t *c, unsigned int count /*XXX Unused?*/) const
458 {
459 TRACE_SANITIZE (this);
460 return_trace (c->check_struct (this));
461 }
462
463 public:
464 HBUINT16 newState; /* New state: byte offset from beginning of state table ('kern'/'mort') or state number ('kerx'/'morx'). */
465 HBUINT16 flags; /* Table specific. */
466 public:
467 DEFINE_SIZE_STATIC (4);
468 };
469
470 template <typename Types, typename Extra>
471 struct StateTable
472 {
473 typedef typename Types::HBUINT HBUINT;
474 typedef typename Types::HBUSHORT HBUSHORT;
475 typedef typename Types::ClassTypeNarrow ClassType;
476
477 enum State
478 {
479 STATE_START_OF_TEXT = 0,
480 STATE_START_OF_LINE = 1,
481 };
482 enum Class
483 {
484 CLASS_END_OF_TEXT = 0,
485 CLASS_OUT_OF_BOUNDS = 1,
486 CLASS_DELETED_GLYPH = 2,
487 CLASS_END_OF_LINE = 3,
488 };
489
490 int new_state (unsigned int newState) const
491 { return Types::extended ? newState : ((int) newState - (int) stateArrayTable) / (int) nClasses; }
492
493 unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
494 {
495 if (unlikely (glyph_id == DELETED_GLYPH)) return CLASS_DELETED_GLYPH;
496 return (this+classTable).get_class (glyph_id, num_glyphs, 1);
497 }
498
499 const Entry<Extra> *get_entries () const
500 { return (this+entryTable).arrayZ; }
501
502 const Entry<Extra> &get_entry (int state, unsigned int klass) const
503 {
504 if (unlikely (klass >= nClasses))
505 klass = StateTable::CLASS_OUT_OF_BOUNDS;
506
507 const HBUSHORT *states = (this+stateArrayTable).arrayZ;
508 const Entry<Extra> *entries = (this+entryTable).arrayZ;
509
510 unsigned int entry = states[state * nClasses + klass];
511 DEBUG_MSG (APPLY, nullptr, "e%u", entry);
512
513 return entries[entry];
514 }
515
516 bool sanitize (hb_sanitize_context_t *c,
517 unsigned int *num_entries_out = nullptr) const
518 {
519 TRACE_SANITIZE (this);
520 if (unlikely (!(c->check_struct (this) &&
521 nClasses >= 4 /* Ensure pre-defined classes fit. */ &&
522 classTable.sanitize (c, this)))) return_trace (false);
523
524 const HBUSHORT *states = (this+stateArrayTable).arrayZ;
525 const Entry<Extra> *entries = (this+entryTable).arrayZ;
526
527 unsigned int num_classes = nClasses;
528 if (unlikely (hb_unsigned_mul_overflows (num_classes, states[0].static_size)))
529 return_trace (false);
530 unsigned int row_stride = num_classes * states[0].static_size;
531
532 /* Apple 'kern' table has this peculiarity:
533 *
534 * "Because the stateTableOffset in the state table header is (strictly
535 * speaking) redundant, some 'kern' tables use it to record an initial
536 * state where that should not be StartOfText. To determine if this is
537 * done, calculate what the stateTableOffset should be. If it's different
538 * from the actual stateTableOffset, use it as the initial state."
539 *
540 * We implement this by calling the initial state zero, but allow *negative*
541 * states if the start state indeed was not the first state. Since the code
542 * is shared, this will also apply to 'mort' table. The 'kerx' / 'morx'
543 * tables are not affected since those address states by index, not offset.
544 */
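/* Illustrative example of the mapping above (numbers are hypothetical):
 * with nClasses = 8 and stateArrayTable = 0x40, a state row in an
 * obsolete-format table is 8 one-byte cells, so a newState byte offset of
 * 0x30 maps via new_state() to (0x30 - 0x40) / 8 = -2, i.e. a state two
 * rows before the nominal start state. */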
545
546 int min_state = 0;
547 int max_state = 0;
548 unsigned int num_entries = 0;
549
550 int state_pos = 0;
551 int state_neg = 0;
552 unsigned int entry = 0;
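/* Fixed-point sweep: alternately validate newly reachable state rows
 * (positive and negative) and the entries they reference, until no new
 * states or entries turn up. */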
553 while (min_state < state_neg || state_pos <= max_state)
554 {
555 if (min_state < state_neg)
556 {
557 /* Negative states. */
558 if (unlikely (hb_unsigned_mul_overflows (min_state, num_classes)))
559 return_trace (false);
560 if (unlikely (!c->check_range (&states[min_state * num_classes],
561 -min_state,
562 row_stride)))
563 return_trace (false);
564 if ((c->max_ops -= state_neg - min_state) <= 0)
565 return_trace (false);
566 { /* Sweep new states. */
567 const HBUSHORT *stop = &states[min_state * num_classes];
568 if (unlikely (stop > states))
569 return_trace (false);
570 for (const HBUSHORT *p = states; stop < p; p--)
571 num_entries = hb_max (num_entries, *(p - 1) + 1u);
572 state_neg = min_state;
573 }
574 }
575
576 if (state_pos <= max_state)
577 {
578 /* Positive states. */
579 if (unlikely (!c->check_range (states,
580 max_state + 1,
581 row_stride)))
582 return_trace (false);
583 if ((c->max_ops -= max_state - state_pos + 1) <= 0)
584 return_trace (false);
585 { /* Sweep new states. */
586 if (unlikely (hb_unsigned_mul_overflows ((max_state + 1), num_classes)))
587 return_trace (false);
588 const HBUSHORT *stop = &states[(max_state + 1) * num_classes];
589 if (unlikely (stop < states))
590 return_trace (false);
591 for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++)
592 num_entries = hb_max (num_entries, *p + 1u);
593 state_pos = max_state + 1;
594 }
595 }
596
597 if (unlikely (!c->check_array (entries, num_entries)))
598 return_trace (false);
599 if ((c->max_ops -= num_entries - entry) <= 0)
600 return_trace (false);
601 { /* Sweep new entries. */
602 const Entry<Extra> *stop = &entries[num_entries];
603 for (const Entry<Extra> *p = &entries[entry]; p < stop; p++)
604 {
605 int newState = new_state (p->newState);
606 min_state = hb_min (min_state, newState);
607 max_state = hb_max (max_state, newState);
608 }
609 entry = num_entries;
610 }
611 }
612
613 if (num_entries_out)
614 *num_entries_out = num_entries;
615
616 return_trace (true);
617 }
618
619 protected:
620 HBUINT nClasses; /* Number of classes, which is the number of indices
621 * in a single line in the state array. */
622 NNOffsetTo<ClassType, HBUINT>
623 classTable; /* Offset to the class table. */
624 NNOffsetTo<UnsizedArrayOf<HBUSHORT>, HBUINT>
625 stateArrayTable;/* Offset to the state array. */
626 NNOffsetTo<UnsizedArrayOf<Entry<Extra>>, HBUINT>
627 entryTable; /* Offset to the entry array. */
628
629 public:
630 DEFINE_SIZE_STATIC (4 * sizeof (HBUINT));
631 };
632
633 template <typename HBUCHAR>
634 struct ClassTable
635 {
636 unsigned int get_class (hb_codepoint_t glyph_id, unsigned int outOfRange) const
637 {
638 unsigned int i = glyph_id - firstGlyph;
639 return i >= classArray.len ? outOfRange : classArray.arrayZ[i];
640 }
641 unsigned int get_class (hb_codepoint_t glyph_id,
642 unsigned int num_glyphs HB_UNUSED,
643 unsigned int outOfRange) const
644 {
645 return get_class (glyph_id, outOfRange);
646 }
647 bool sanitize (hb_sanitize_context_t *c) const
648 {
649 TRACE_SANITIZE (this);
650 return_trace (c->check_struct (this) && classArray.sanitize (c));
651 }
652 protected:
653 HBGlyphID16 firstGlyph; /* First glyph index included in the trimmed array. */
654 Array16Of<HBUCHAR> classArray; /* The class codes (indexed by glyph index minus
655 * firstGlyph). */
656 public:
657 DEFINE_SIZE_ARRAY (4, classArray);
658 };
659
660 struct ObsoleteTypes
661 {
662 static constexpr bool extended = false;
663 typedef HBUINT16 HBUINT;
664 typedef HBUINT8 HBUSHORT;
665 typedef ClassTable<HBUINT8> ClassTypeNarrow;
666 typedef ClassTable<HBUINT16> ClassTypeWide;
667
668 template <typename T>
669 static unsigned int offsetToIndex (unsigned int offset,
670 const void *base,
671 const T *array)
672 {
673 /* https://github.com/harfbuzz/harfbuzz/issues/3483 */
674 /* If offset is less than base, return an offset that would
675 * result in an address half a 32bit address-space away,
676 * to make sure sanitize fails even on 32bit builds. */
677 if (unlikely (offset < unsigned ((const char *) array - (const char *) base)))
678 return INT_MAX / T::static_size;
679
680 /* https://github.com/harfbuzz/harfbuzz/issues/2816 */
681 return (offset - unsigned ((const char *) array - (const char *) base)) / T::static_size;
682 }
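/* Worked example (hypothetical layout): if the array starts 10 bytes past
 * base and T::static_size is 4, offsetToIndex (18, base, array) returns
 * (18 - 10) / 4 = 2; byteOffsetToIndex forwards the offset unchanged, and
 * wordOffsetToIndex (9, ...) doubles it to 18 first, giving the same index. */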
683 template <typename T>
684 static unsigned int byteOffsetToIndex (unsigned int offset,
685 const void *base,
686 const T *array)
687 {
688 return offsetToIndex (offset, base, array);
689 }
690 template <typename T>
691 static unsigned int wordOffsetToIndex (unsigned int offset,
692 const void *base,
693 const T *array)
694 {
695 return offsetToIndex (2 * offset, base, array);
696 }
697 };
698 struct ExtendedTypes
699 {
700 static constexpr bool extended = true;
701 typedef HBUINT32 HBUINT;
702 typedef HBUINT16 HBUSHORT;
703 typedef Lookup<HBUINT16> ClassTypeNarrow;
704 typedef Lookup<HBUINT16> ClassTypeWide;
705
706 template <typename T>
707 static unsigned int offsetToIndex (unsigned int offset,
708 const void *base HB_UNUSED,
709 const T *array HB_UNUSED)
710 {
711 return offset;
712 }
713 template <typename T>
714 static unsigned int byteOffsetToIndex (unsigned int offset,
715 const void *base HB_UNUSED,
716 const T *array HB_UNUSED)
717 {
718 return offset / 2;
719 }
720 template <typename T>
721 static unsigned int wordOffsetToIndex (unsigned int offset,
722 const void *base HB_UNUSED,
723 const T *array HB_UNUSED)
724 {
725 return offset;
726 }
727 };
728
729 template <typename Types, typename EntryData>
730 struct StateTableDriver
731 {
732 using StateTableT = StateTable<Types, EntryData>;
733 using EntryT = Entry<EntryData>;
734
735 StateTableDriver (const StateTableT &machine_,
736 hb_buffer_t *buffer_,
737 hb_face_t *face_) :
738 machine (machine_),
739 buffer (buffer_),
740 num_glyphs (face_->get_num_glyphs ()) {}
741
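/* drive() below walks the buffer through the state machine. The context_t
 * argument is whatever driver type the caller supplies; from its use here
 * it must provide a bool member in_place, a DontAdvance flag constant
 * (tested against entry.flags), is_actionable (driver, entry) to report
 * whether a transition has side effects, and transition (driver, entry) to
 * apply them. */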
742 template <typename context_t>
743 void drive (context_t *c)
744 {
745 if (!c->in_place)
746 buffer->clear_output ();
747
748 int state = StateTableT::STATE_START_OF_TEXT;
749 for (buffer->idx = 0; buffer->successful;)
750 {
751 unsigned int klass = buffer->idx < buffer->len ?
752 machine.get_class (buffer->info[buffer->idx].codepoint, num_glyphs) :
753 (unsigned) StateTableT::CLASS_END_OF_TEXT;
754 DEBUG_MSG (APPLY, nullptr, "c%u at %u", klass, buffer->idx);
755 const EntryT &entry = machine.get_entry (state, klass);
756 const int next_state = machine.new_state (entry.newState);
757
758 /* Conditions under which it's guaranteed safe-to-break before current glyph:
759 *
760 * 1. There was no action in this transition; and
761 *
762 * 2. If we break before current glyph, the results will be the same. That
763 * is guaranteed if:
764 *
765 * 2a. We were already in start-of-text state; or
766 *
767 * 2b. We are epsilon-transitioning to start-of-text state; or
768 *
769 * 2c. Starting from start-of-text state seeing current glyph:
770 *
771 * 2c'. There won't be any actions; and
772 *
773 * 2c". We would end up in the same state that we were going to end up
774 * in now, including whether epsilon-transitioning.
775 *
776 * and
777 *
778 * 3. If we break before current glyph, there won't be any end-of-text action
779 * after previous glyph.
780 *
781 * This triples the transitions we need to look up, but is worth returning
782 * granular unsafe-to-break results. See eg.:
783 *
784 * https://github.com/harfbuzz/harfbuzz/issues/2860
785 */
786 const EntryT *wouldbe_entry;
787 bool safe_to_break =
788 /* 1. */
789 !c->is_actionable (this, entry)
790 &&
791 /* 2. */
792 (
793 /* 2a. */
794 state == StateTableT::STATE_START_OF_TEXT
795 ||
796 /* 2b. */
797 (
798 (entry.flags & context_t::DontAdvance) &&
799 next_state == StateTableT::STATE_START_OF_TEXT
800 )
801 ||
802 /* 2c. */
803 (
804 wouldbe_entry = &machine.get_entry (StateTableT::STATE_START_OF_TEXT, klass)
805 ,
806 /* 2c'. */
807 !c->is_actionable (this, *wouldbe_entry)
808 &&
809 /* 2c". */
810 (
811 next_state == machine.new_state (wouldbe_entry->newState)
812 &&
813 (entry.flags & context_t::DontAdvance) == (wouldbe_entry->flags & context_t::DontAdvance)
814 )
815 )
816 )
817 &&
818 /* 3. */
819 !c->is_actionable (this, machine.get_entry (state, StateTableT::CLASS_END_OF_TEXT))
820 ;
821
822 if (!safe_to_break && buffer->backtrack_len () && buffer->idx < buffer->len)
823 buffer->unsafe_to_break_from_outbuffer (buffer->backtrack_len () - 1, buffer->idx + 1);
824
825 c->transition (this, entry);
826
827 state = next_state;
828 DEBUG_MSG (APPLY, nullptr, "s%d", state);
829
830 if (buffer->idx == buffer->len || unlikely (!buffer->successful))
831 break;
832
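/* Advance to the next glyph unless the entry requests DontAdvance; if the
 * operation budget (max_ops) is exhausted, advance anyway so a malicious
 * table cannot loop forever. */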
833 if (!(entry.flags & context_t::DontAdvance) || buffer->max_ops-- <= 0)
834 (void) buffer->next_glyph ();
835 }
836
837 if (!c->in_place)
838 buffer->sync ();
839 }
840
841 public:
842 const StateTableT &machine;
843 hb_buffer_t *buffer;
844 unsigned int num_glyphs;
845 };
846
847
848 struct ankr;
849
850 struct hb_aat_apply_context_t :
851 hb_dispatch_context_t<hb_aat_apply_context_t, bool, HB_DEBUG_APPLY>
852 {
853 const char *get_name () { return "APPLY"; }
854 template <typename T>
855 return_t dispatch (const T &obj) { return obj.apply (this); }
856 static return_t default_return_value () { return false; }
857 bool stop_sublookup_iteration (return_t r) const { return r; }
858
859 const hb_ot_shape_plan_t *plan;
860 hb_font_t *font;
861 hb_face_t *face;
862 hb_buffer_t *buffer;
863 hb_sanitize_context_t sanitizer;
864 const ankr *ankr_table;
865 const OT::GDEF *gdef_table;
866
867 /* Unused. For debug tracing only. */
868 unsigned int lookup_index;
869
870 HB_INTERNAL hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_,
871 hb_font_t *font_,
872 hb_buffer_t *buffer_,
873 hb_blob_t *blob = const_cast<hb_blob_t *> (&Null (hb_blob_t)));
874
875 HB_INTERNAL ~hb_aat_apply_context_t ();
876
877 HB_INTERNAL void set_ankr_table (const AAT::ankr *ankr_table_);
878
879 void set_lookup_index (unsigned int i) { lookup_index = i; }
880 };
881
882
883 } /* namespace AAT */
884
885
886 #endif /* HB_AAT_LAYOUT_COMMON_HH */