comparison mupdf-source/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkBasePosFormat1.hh @ 2:b50eed0cc0ef upstream

ADD: MuPDF v1.26.7: the MuPDF source as downloaded by a default build of PyMuPDF 1.26.4. The directory name has changed: the expanded directory no longer carries a version number.
author Franz Glasner <fzglas.hg@dom66.de>
date Mon, 15 Sep 2025 11:43:07 +0200
#ifndef OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH

#include "MarkArray.hh"

namespace OT {
namespace Layout {
namespace GPOS_impl {

typedef AnchorMatrix BaseArray;         /* base-major--
                                         * in order of BaseCoverage Index--,
                                         * mark-minor--
                                         * ordered by class--zero-based. */
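/* I.e., the anchor for the base at BaseCoverage index `b` and mark class `k`
 * sits at matrix index `b * classCount + k`; the loops below compute exactly
 * `row * classCount + col`. */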

template <typename Types>
struct MarkBasePosFormat1_2
{
  protected:
  HBUINT16      format;                 /* Format identifier--format = 1 */
  typename Types::template OffsetTo<Coverage>
                markCoverage;           /* Offset to MarkCoverage table--from
                                         * beginning of MarkBasePos subtable */
  typename Types::template OffsetTo<Coverage>
                baseCoverage;           /* Offset to BaseCoverage table--from
                                         * beginning of MarkBasePos subtable */
  HBUINT16      classCount;             /* Number of classes defined for marks */
  typename Types::template OffsetTo<MarkArray>
                markArray;              /* Offset to MarkArray table--from
                                         * beginning of MarkBasePos subtable */
  typename Types::template OffsetTo<BaseArray>
                baseArray;              /* Offset to BaseArray table--from
                                         * beginning of MarkBasePos subtable */

  public:
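  /* Header size: 2 bytes each for format and classCount, plus four offsets
   * (markCoverage, baseCoverage, markArray, baseArray) of Types::size each. */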
  DEFINE_SIZE_STATIC (4 + 4 * Types::size);

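  /* Validate the fixed-size header and chase each offset; baseArray
   * additionally needs classCount to bounds-check its anchor matrix. */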
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  markCoverage.sanitize (c, this) &&
                  baseCoverage.sanitize (c, this) &&
                  markArray.sanitize (c, this) &&
                  baseArray.sanitize (c, this, (unsigned int) classCount));
  }

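  /* The subtable can only ever apply if the glyph set contains both a
   * covered mark and a covered base. */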
  bool intersects (const hb_set_t *glyphs) const
  {
    return (this+markCoverage).intersects (glyphs) &&
           (this+baseCoverage).intersects (glyphs);
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const {}

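  /* Collect variation indices first for the anchors of every mark in the
   * glyph set, then for the base anchors, restricted to rows of retained
   * bases and columns of mark classes still in use. */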
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    + hb_zip (this+markCoverage, this+markArray)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
    ;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);

    unsigned basecount = (this+baseArray).rows;
    auto base_iter =
    + hb_zip (this+baseCoverage, hb_range (basecount))
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    ;

    hb_sorted_vector_t<unsigned> base_indexes;
    for (const unsigned row : base_iter)
    {
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (base_indexes)
      ;
    }
    (this+baseArray).collect_variation_indices (c, base_indexes.iter ());
  }

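  /* The glyphs this subtable can act on are exactly those listed in the
   * two coverage tables; add both to the input set. */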
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
    if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
  }

  const Coverage &get_coverage () const { return this+markCoverage; }

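  /* Position the current glyph (a covered mark) against the nearest
   * preceding base: scan backwards past other marks, then attach using the
   * base's anchor for this mark's class. */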
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return_trace (false);

    /* Now we search backwards for a non-mark glyph */
    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    do {
      unsigned unsafe_from;
      if (!skippy_iter.prev (&unsafe_from))
      {
        buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
        return_trace (false);
      }

      /* We only want to attach to the first of a MultipleSubst sequence.
       * https://github.com/harfbuzz/harfbuzz/issues/740
       * Reject others...
       * ...but stop if we find a mark in the MultipleSubst sequence:
       * https://github.com/harfbuzz/harfbuzz/issues/1020 */
      if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
          0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
          (skippy_iter.idx == 0 ||
           _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
           !_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx - 1]) ||
           _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
           _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
           _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
           _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
          ))
        break;
      skippy_iter.reject ();
    } while (true);

    /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
    //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }

    unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
    if (base_index == NOT_COVERED)
    {
      buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
      return_trace (false);
    }

    return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
  }

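  /* Drop marks and bases outside the retained glyph set, renumber the
   * surviving mark classes densely, and re-serialize both coverages and
   * both anchor arrays against the new numbering. */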
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);

    if (!klass_mapping.get_population ()) return_trace (false);
    out->classCount = klass_mapping.get_population ();

    auto mark_iter =
    + hb_zip (this+markCoverage, this+markArray)
    | hb_filter (glyphset, hb_first)
    ;

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + mark_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->markCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    out->markArray.serialize_subset (c, markArray, this,
                                     (this+markCoverage).iter (),
                                     &klass_mapping);

    unsigned basecount = (this+baseArray).rows;
    auto base_iter =
    + hb_zip (this+baseCoverage, hb_range (basecount))
    | hb_filter (glyphset, hb_first)
    ;

    new_coverage.reset ();
    + base_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->baseCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    hb_sorted_vector_t<unsigned> base_indexes;
    for (const unsigned row : + base_iter
                              | hb_map (hb_second))
    {
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (base_indexes)
      ;
    }

    out->baseArray.serialize_subset (c, baseArray, this,
                                     base_iter.len (),
                                     base_indexes.iter ());

    return_trace (true);
  }
};


}
}
}

#endif /* OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH */