Mercurial > hgrepos > Python2 > PyMuPDF
comparison mupdf-source/thirdparty/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh @ 2:b50eed0cc0ef upstream
ADD: MuPDF v1.26.7: the MuPDF source as downloaded by a default build of PyMuPDF 1.26.4.
The directory name has changed: the expanded directory no longer includes a version number.
| author | Franz Glasner <fzglas.hg@dom66.de> |
|---|---|
| date | Mon, 15 Sep 2025 11:43:07 +0200 |
| parents | |
| children |
comparison
equal
deleted
inserted
replaced
| 1:1d09e1dec1d9 | 2:b50eed0cc0ef |
|---|---|
#ifndef OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH
#define OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH

/* NOTE(review): this header #includes itself; the include guard above makes
 * it a no-op, so the types used below (AnchorMatrix, MarkArray, Coverage,
 * hb_* contexts) must already be in scope at the point of inclusion.
 * Presumably this line was meant to pull in a dependency header (e.g.
 * "MarkArray.hh") -- confirm against upstream HarfBuzz before changing. */
#include "MarkMarkPosFormat1.hh"

namespace OT {
namespace Layout {
namespace GPOS_impl {

/* Mark2Array: matrix of anchors with one row per mark2 glyph (in
 * Mark2Coverage order) and one column per mark class (zero-based). */
typedef AnchorMatrix Mark2Array;        /* mark2-major--
                                         * in order of Mark2Coverage Index--,
                                         * mark1-minor--
                                         * ordered by class--zero-based. */
| 14 | |
/* MarkToMark attachment positioning subtable, format 1 (OpenType GPOS
 * lookup type 6): attaches a combining mark (mark1) to a preceding mark
 * (mark2), using the anchor for mark1's class in mark2's anchor-matrix row.
 * `Types` selects the offset width used by the OffsetTo fields. */
template <typename Types>
struct MarkMarkPosFormat1_2
{
  protected:
  HBUINT16      format;                 /* Format identifier--format = 1 */
  typename Types::template OffsetTo<Coverage>
                mark1Coverage;          /* Offset to Combining Mark1 Coverage
                                         * table--from beginning of MarkMarkPos
                                         * subtable */
  typename Types::template OffsetTo<Coverage>
                mark2Coverage;          /* Offset to Combining Mark2 Coverage
                                         * table--from beginning of MarkMarkPos
                                         * subtable */
  HBUINT16      classCount;             /* Number of defined mark classes */
  typename Types::template OffsetTo<MarkArray>
                mark1Array;             /* Offset to Mark1Array table--from
                                         * beginning of MarkMarkPos subtable */
  typename Types::template OffsetTo<Mark2Array>
                mark2Array;             /* Offset to Mark2Array table--from
                                         * beginning of MarkMarkPos subtable */
  public:
  DEFINE_SIZE_STATIC (4 + 4 * Types::size);

  /* Bounds-check the subtable: the fixed header plus every referenced
   * sub-table.  mark2Array is an AnchorMatrix whose column count is
   * classCount, hence the extra argument. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  mark1Coverage.sanitize (c, this) &&
                  mark2Coverage.sanitize (c, this) &&
                  mark1Array.sanitize (c, this) &&
                  mark2Array.sanitize (c, this, (unsigned int) classCount));
  }

  /* The subtable can only act if both a mark1 and a mark2 glyph survive,
   * so both coverages must intersect the glyph set. */
  bool intersects (const hb_set_t *glyphs) const
  {
    return (this+mark1Coverage).intersects (glyphs) &&
           (this+mark2Coverage).intersects (glyphs);
  }

  /* No nested lookups to close over. */
  void closure_lookups (hb_closure_lookups_context_t *c) const {}

  /* Collect variation-device indices reachable from this subtable,
   * restricted to glyphs in c->glyph_set: every covered mark1 record, plus
   * each (row, class) cell of the mark2 anchor matrix whose row glyph is
   * retained and whose class survives the class remapping. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    + hb_zip (this+mark1Coverage, this+mark1Array)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); })
    ;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping);

    unsigned mark2_count = (this+mark2Array).rows;
    auto mark2_iter =
    + hb_zip (this+mark2Coverage, hb_range (mark2_count))
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    ;

    /* Flattened (row * classCount + col) indices into the anchor matrix. */
    hb_sorted_vector_t<unsigned> mark2_indexes;
    for (const unsigned row : mark2_iter)
    {
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (mark2_indexes)
      ;
    }
    (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ());
  }

  /* Add every glyph either coverage can match to the lookup's input set. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
    if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
  }

  /* mark1 (the mark being attached) drives applicability. */
  const Coverage &get_coverage () const { return this+mark1Coverage; }

  /* Try to attach the current glyph (a mark1) to the closest preceding
   * mark glyph (a mark2).  Returns false -- flagging unsafe-to-concat
   * ranges where appropriate -- if no suitable pair is found. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
    /* Common case: current glyph is not a covered mark1. */
    if (likely (mark1_index == NOT_COVERED)) return_trace (false);

    /* now we search backwards for a suitable mark glyph until a non-mark glyph */
    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    /* Clear IgnoreFlags so preceding marks stay visible to the iterator. */
    skippy_iter.set_lookup_props (c->lookup_props & ~(uint32_t)LookupFlag::IgnoreFlags);
    unsigned unsafe_from;
    if (!skippy_iter.prev (&unsafe_from))
    {
      buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
      return_trace (false);
    }

    if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]))
    {
      buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
      return_trace (false);
    }

    unsigned int j = skippy_iter.idx;

    /* Only attach marks that belong together: compare the ligature ids and
     * ligature component numbers of the two marks. */
    unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
    unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);

    if (likely (id1 == id2))
    {
      if (id1 == 0) /* Marks belonging to the same base. */
        goto good;
      else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
        goto good;
    }
    else
    {
      /* If ligature ids don't match, it may be the case that one of the marks
       * itself is a ligature.  In which case match. */
      if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
        goto good;
    }

    /* Didn't match. */
    buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
    return_trace (false);

    good:
    unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
    if (mark2_index == NOT_COVERED)
    {
      buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
      return_trace (false);
    }

    /* Position mark1 (at buffer->idx) against mark2 (at position j). */
    return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
  }

  /* Subset the subtable against the plan: drop glyphs outside the retained
   * set, remap mark classes densely, and rebuild both coverages and both
   * arrays.  Returns false to drop the subtable when nothing survives. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);

    /* No mark class survives: the whole subtable is dead. */
    if (!klass_mapping.get_population ()) return_trace (false);
    out->classCount = klass_mapping.get_population ();

    auto mark1_iter =
    + hb_zip (this+mark1Coverage, this+mark1Array)
    | hb_filter (glyphset, hb_first)
    ;

    /* Retained mark1 glyphs, remapped to new glyph ids. */
    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + mark1_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->mark1Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    out->mark1Array.serialize_subset (c, mark1Array, this,
                                      (this+mark1Coverage).iter (),
                                      &klass_mapping);

    unsigned mark2count = (this+mark2Array).rows;
    auto mark2_iter =
    + hb_zip (this+mark2Coverage, hb_range (mark2count))
    | hb_filter (glyphset, hb_first)
    ;

    /* Reuse the scratch vector for the mark2 coverage. */
    new_coverage.reset ();
    + mark2_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->mark2Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    /* Flattened (row * classCount + col) indices of the anchor-matrix
     * cells to keep: retained rows x surviving classes. */
    hb_sorted_vector_t<unsigned> mark2_indexes;
    for (const unsigned row : + mark2_iter
                              | hb_map (hb_second))
    {
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (mark2_indexes)
      ;
    }

    out->mark2Array.serialize_subset (c, mark2Array, this, mark2_iter.len (), mark2_indexes.iter ());

    return_trace (true);
  }
};
| 222 | |

}  // namespace GPOS_impl
}  // namespace Layout
}  // namespace OT

#endif /* OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH */
