comparison: mupdf-source/thirdparty/harfbuzz/src/hb-buffer.hh @ 2:b50eed0cc0ef (branch: upstream)
ADD: MuPDF v1.26.7: the MuPDF source as downloaded by a default build of PyMuPDF 1.26.4.
The directory name has changed: the expanded directory no longer carries a version number.
| author | Franz Glasner <fzglas.hg@dom66.de> |
|---|---|
| date | Mon, 15 Sep 2025 11:43:07 +0200 |
| parents | |
| children | |
Revisions compared: 1:1d09e1dec1d9 and 2:b50eed0cc0ef. File content:
```cpp
/*
 * Copyright © 1998-2004 David Turner and Werner Lemberg
 * Copyright © 2004,2007,2009,2010 Red Hat, Inc.
 * Copyright © 2011,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Owen Taylor, Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_BUFFER_HH
#define HB_BUFFER_HH

#include "hb.hh"
#include "hb-unicode.hh"
#include "hb-set-digest.hh"


#ifndef HB_BUFFER_MAX_LEN_FACTOR
#define HB_BUFFER_MAX_LEN_FACTOR 64
#endif
#ifndef HB_BUFFER_MAX_LEN_MIN
#define HB_BUFFER_MAX_LEN_MIN 16384
#endif
#ifndef HB_BUFFER_MAX_LEN_DEFAULT
#define HB_BUFFER_MAX_LEN_DEFAULT 0x3FFFFFFF /* Shaping more than a billion chars? Let us know! */
#endif

#ifndef HB_BUFFER_MAX_OPS_FACTOR
#define HB_BUFFER_MAX_OPS_FACTOR 1024
#endif
#ifndef HB_BUFFER_MAX_OPS_MIN
#define HB_BUFFER_MAX_OPS_MIN 16384
#endif
#ifndef HB_BUFFER_MAX_OPS_DEFAULT
#define HB_BUFFER_MAX_OPS_DEFAULT 0x1FFFFFFF /* Shaping more than a billion operations? Let us know! */
#endif

static_assert ((sizeof (hb_glyph_info_t) == 20), "");
static_assert ((sizeof (hb_glyph_info_t) == sizeof (hb_glyph_position_t)), "");

HB_MARK_AS_FLAG_T (hb_glyph_flags_t);
HB_MARK_AS_FLAG_T (hb_buffer_flags_t);
HB_MARK_AS_FLAG_T (hb_buffer_serialize_flags_t);
HB_MARK_AS_FLAG_T (hb_buffer_diff_flags_t);

enum hb_buffer_scratch_flags_t {
  HB_BUFFER_SCRATCH_FLAG_DEFAULT = 0x00000000u,
  HB_BUFFER_SCRATCH_FLAG_HAS_NON_ASCII = 0x00000001u,
  HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES = 0x00000002u,
  HB_BUFFER_SCRATCH_FLAG_HAS_SPACE_FALLBACK = 0x00000004u,
  HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT = 0x00000008u,
  HB_BUFFER_SCRATCH_FLAG_HAS_CGJ = 0x00000010u,
  HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS = 0x00000020u,
  HB_BUFFER_SCRATCH_FLAG_HAS_BROKEN_SYLLABLE = 0x00000040u,

  /* Reserved for shapers' internal use. */
  HB_BUFFER_SCRATCH_FLAG_SHAPER0 = 0x01000000u,
  HB_BUFFER_SCRATCH_FLAG_SHAPER1 = 0x02000000u,
  HB_BUFFER_SCRATCH_FLAG_SHAPER2 = 0x04000000u,
  HB_BUFFER_SCRATCH_FLAG_SHAPER3 = 0x08000000u,
};
HB_MARK_AS_FLAG_T (hb_buffer_scratch_flags_t);


/*
 * hb_buffer_t
 */

struct hb_buffer_t
{
  hb_object_header_t header;

  /*
   * Information about how the text in the buffer should be treated.
   */

  hb_unicode_funcs_t *unicode; /* Unicode functions */
  hb_buffer_flags_t flags; /* BOT / EOT / etc. */
  hb_buffer_cluster_level_t cluster_level;
  hb_codepoint_t replacement; /* U+FFFD or something else. */
  hb_codepoint_t invisible; /* 0 or something else. */
  hb_codepoint_t not_found; /* 0 or something else. */

  /*
   * Buffer contents
   */

  hb_buffer_content_type_t content_type;
  hb_segment_properties_t props; /* Script, language, direction */

  bool successful; /* Allocations successful */
  bool shaping_failed; /* Shaping failure */
  bool have_output; /* Whether we have an output buffer going on */
  bool have_positions; /* Whether we have positions */

  unsigned int idx; /* Cursor into ->info and ->pos arrays */
  unsigned int len; /* Length of ->info and ->pos arrays */
  unsigned int out_len; /* Length of ->out_info array if have_output */

  unsigned int allocated; /* Length of allocated arrays */
  hb_glyph_info_t *info;
  hb_glyph_info_t *out_info;
  hb_glyph_position_t *pos;

  /* Text before / after the main buffer contents.
   * Always in Unicode, and ordered outward.
   * Index 0 is for "pre-context", 1 for "post-context". */
  static constexpr unsigned CONTEXT_LENGTH = 5u;
  hb_codepoint_t context[2][CONTEXT_LENGTH];
  unsigned int context_len[2];


  /*
   * Managed by enter / leave
   */

  uint8_t allocated_var_bits;
  uint8_t serial;
  hb_buffer_scratch_flags_t scratch_flags; /* Have space-fallback, etc. */
  unsigned int max_len; /* Maximum allowed len. */
  int max_ops; /* Maximum allowed operations. */
  /* The bits here reflect current allocations of the bytes in glyph_info_t's var1 and var2. */


  /*
   * Messaging callback
   */

#ifndef HB_NO_BUFFER_MESSAGE
  hb_buffer_message_func_t message_func;
  void *message_data;
  hb_destroy_func_t message_destroy;
  unsigned message_depth; /* How deeply are we inside a message callback? */
#else
  static constexpr unsigned message_depth = 0u;
#endif



  /* Methods */

  HB_NODISCARD bool in_error () const { return !successful; }

  void allocate_var (unsigned int start, unsigned int count)
  {
    unsigned int end = start + count;
    assert (end <= 8);
    unsigned int bits = (1u<<end) - (1u<<start);
    assert (0 == (allocated_var_bits & bits));
    allocated_var_bits |= bits;
  }
  bool try_allocate_var (unsigned int start, unsigned int count)
  {
    unsigned int end = start + count;
    assert (end <= 8);
    unsigned int bits = (1u<<end) - (1u<<start);
    if (allocated_var_bits & bits)
      return false;
    allocated_var_bits |= bits;
    return true;
  }
  void deallocate_var (unsigned int start, unsigned int count)
  {
    unsigned int end = start + count;
    assert (end <= 8);
    unsigned int bits = (1u<<end) - (1u<<start);
    assert (bits == (allocated_var_bits & bits));
    allocated_var_bits &= ~bits;
  }
  void assert_var (unsigned int start, unsigned int count)
  {
    unsigned int end = start + count;
    assert (end <= 8);
    HB_UNUSED unsigned int bits = (1u<<end) - (1u<<start);
    assert (bits == (allocated_var_bits & bits));
  }
  void deallocate_var_all ()
  {
    allocated_var_bits = 0;
  }

  hb_glyph_info_t &cur (unsigned int i = 0) { return info[idx + i]; }
  hb_glyph_info_t cur (unsigned int i = 0) const { return info[idx + i]; }

  hb_glyph_position_t &cur_pos (unsigned int i = 0) { return pos[idx + i]; }
  hb_glyph_position_t cur_pos (unsigned int i = 0) const { return pos[idx + i]; }

  hb_glyph_info_t &prev () { return out_info[out_len ? out_len - 1 : 0]; }
  hb_glyph_info_t prev () const { return out_info[out_len ? out_len - 1 : 0]; }

  hb_set_digest_t digest () const
  {
    hb_set_digest_t d;
    d.init ();
    d.add_array (&info[0].codepoint, len, sizeof (info[0]));
    return d;
  }

  HB_INTERNAL void similar (const hb_buffer_t &src);
  HB_INTERNAL void reset ();
  HB_INTERNAL void clear ();

  /* Called around shape() */
  HB_INTERNAL void enter ();
  HB_INTERNAL void leave ();

#ifndef HB_NO_BUFFER_VERIFY
  HB_INTERNAL
#endif
  bool verify (hb_buffer_t *text_buffer,
               hb_font_t *font,
               const hb_feature_t *features,
               unsigned int num_features,
               const char * const *shapers)
#ifndef HB_NO_BUFFER_VERIFY
  ;
#else
  { return true; }
#endif

  unsigned int backtrack_len () const { return have_output ? out_len : idx; }
  unsigned int lookahead_len () const { return len - idx; }
  uint8_t next_serial () { return ++serial ? serial : ++serial; }

  HB_INTERNAL void add (hb_codepoint_t codepoint,
                        unsigned int cluster);
  HB_INTERNAL void add_info (const hb_glyph_info_t &glyph_info);

  void reverse_range (unsigned start, unsigned end)
  {
    hb_array_t<hb_glyph_info_t> (info, len).reverse (start, end);
    if (have_positions)
      hb_array_t<hb_glyph_position_t> (pos, len).reverse (start, end);
  }
  void reverse () { reverse_range (0, len); }

  template <typename FuncType>
  void reverse_groups (const FuncType& group,
                       bool merge_clusters = false)
  {
    if (unlikely (!len))
      return;

    unsigned start = 0;
    unsigned i;
    for (i = 1; i < len; i++)
    {
      if (!group (info[i - 1], info[i]))
      {
        if (merge_clusters)
          this->merge_clusters (start, i);
        reverse_range (start, i);
        start = i;
      }
    }
    if (merge_clusters)
      this->merge_clusters (start, i);
    reverse_range (start, i);

    reverse ();
  }

  template <typename FuncType>
  unsigned group_end (unsigned start, const FuncType& group) const
  {
    while (++start < len && group (info[start - 1], info[start]))
      ;

    return start;
  }

  static bool _cluster_group_func (const hb_glyph_info_t& a,
                                   const hb_glyph_info_t& b)
  { return a.cluster == b.cluster; }

  void reverse_clusters () { reverse_groups (_cluster_group_func); }

  HB_INTERNAL void guess_segment_properties ();

  HB_INTERNAL bool sync ();
  HB_INTERNAL int sync_so_far ();
  HB_INTERNAL void clear_output ();
  HB_INTERNAL void clear_positions ();

  template <typename T>
  HB_NODISCARD bool replace_glyphs (unsigned int num_in,
                                    unsigned int num_out,
                                    const T *glyph_data)
  {
    if (unlikely (!make_room_for (num_in, num_out))) return false;

    assert (idx + num_in <= len);

    merge_clusters (idx, idx + num_in);

    hb_glyph_info_t &orig_info = idx < len ? cur() : prev();

    hb_glyph_info_t *pinfo = &out_info[out_len];
    for (unsigned int i = 0; i < num_out; i++)
    {
      *pinfo = orig_info;
      pinfo->codepoint = glyph_data[i];
      pinfo++;
    }

    idx += num_in;
    out_len += num_out;
    return true;
  }

  HB_NODISCARD bool replace_glyph (hb_codepoint_t glyph_index)
  { return replace_glyphs (1, 1, &glyph_index); }

  /* Makes a copy of the glyph at idx to output and replace glyph_index */
  HB_NODISCARD bool output_glyph (hb_codepoint_t glyph_index)
  { return replace_glyphs (0, 1, &glyph_index); }

  HB_NODISCARD bool output_info (const hb_glyph_info_t &glyph_info)
  {
    if (unlikely (!make_room_for (0, 1))) return false;

    out_info[out_len] = glyph_info;

    out_len++;
    return true;
  }
  /* Copies glyph at idx to output but doesn't advance idx */
  HB_NODISCARD bool copy_glyph ()
  {
    /* Extra copy because cur()'s return can be freed within
     * output_info() call if buffer reallocates. */
    return output_info (hb_glyph_info_t (cur()));
  }

  /* Copies glyph at idx to output and advance idx.
   * If there's no output, just advance idx. */
  HB_NODISCARD bool next_glyph ()
  {
    if (have_output)
    {
      if (out_info != info || out_len != idx)
      {
        if (unlikely (!make_room_for (1, 1))) return false;
        out_info[out_len] = info[idx];
      }
      out_len++;
    }

    idx++;
    return true;
  }
  /* Copies n glyphs at idx to output and advance idx.
   * If there's no output, just advance idx. */
  HB_NODISCARD bool next_glyphs (unsigned int n)
  {
    if (have_output)
    {
      if (out_info != info || out_len != idx)
      {
        if (unlikely (!make_room_for (n, n))) return false;
        memmove (out_info + out_len, info + idx, n * sizeof (out_info[0]));
      }
      out_len += n;
    }

    idx += n;
    return true;
  }
  /* Advance idx without copying to output. */
  void skip_glyph () { idx++; }
  void reset_masks (hb_mask_t mask)
  {
    for (unsigned int j = 0; j < len; j++)
      info[j].mask = mask;
  }
  void add_masks (hb_mask_t mask)
  {
    for (unsigned int j = 0; j < len; j++)
      info[j].mask |= mask;
  }
  HB_INTERNAL void set_masks (hb_mask_t value, hb_mask_t mask,
                              unsigned int cluster_start, unsigned int cluster_end);

  void merge_clusters (unsigned int start, unsigned int end)
  {
    if (end - start < 2)
      return;
    merge_clusters_impl (start, end);
  }
  HB_INTERNAL void merge_clusters_impl (unsigned int start, unsigned int end);
  HB_INTERNAL void merge_out_clusters (unsigned int start, unsigned int end);
  /* Merge clusters for deleting current glyph, and skip it. */
  HB_INTERNAL void delete_glyph ();
  HB_INTERNAL void delete_glyphs_inplace (bool (*filter) (const hb_glyph_info_t *info));



  /* Adds glyph flags in mask to infos with clusters between start and end.
   * The start index will be from out-buffer if from_out_buffer is true.
   * If interior is true, then the cluster having the minimum value is skipped. */
  void _set_glyph_flags (hb_mask_t mask,
                         unsigned start = 0,
                         unsigned end = (unsigned) -1,
                         bool interior = false,
                         bool from_out_buffer = false)
  {
    end = hb_min (end, len);

    if (interior && !from_out_buffer && end - start < 2)
      return;

    scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS;

    if (!from_out_buffer || !have_output)
    {
      if (!interior)
      {
        for (unsigned i = start; i < end; i++)
          info[i].mask |= mask;
      }
      else
      {
        unsigned cluster = _infos_find_min_cluster (info, start, end);
        _infos_set_glyph_flags (info, start, end, cluster, mask);
      }
    }
    else
    {
      assert (start <= out_len);
      assert (idx <= end);

      if (!interior)
      {
        for (unsigned i = start; i < out_len; i++)
          out_info[i].mask |= mask;
        for (unsigned i = idx; i < end; i++)
          info[i].mask |= mask;
      }
      else
      {
        unsigned cluster = _infos_find_min_cluster (info, idx, end);
        cluster = _infos_find_min_cluster (out_info, start, out_len, cluster);

        _infos_set_glyph_flags (out_info, start, out_len, cluster, mask);
        _infos_set_glyph_flags (info, idx, end, cluster, mask);
      }
    }
  }

  void unsafe_to_break (unsigned int start = 0, unsigned int end = -1)
  {
    _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK | HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
                      start, end,
                      true);
  }
  void safe_to_insert_tatweel (unsigned int start = 0, unsigned int end = -1)
  {
    if ((flags & HB_BUFFER_FLAG_PRODUCE_SAFE_TO_INSERT_TATWEEL) == 0)
    {
      unsafe_to_break (start, end);
      return;
    }
    _set_glyph_flags (HB_GLYPH_FLAG_SAFE_TO_INSERT_TATWEEL,
                      start, end,
                      true);
  }
  void unsafe_to_concat (unsigned int start = 0, unsigned int end = -1)
  {
    if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0))
      return;
    _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
                      start, end,
                      true);
  }
  void unsafe_to_break_from_outbuffer (unsigned int start = 0, unsigned int end = -1)
  {
    _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK | HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
                      start, end,
                      true, true);
  }
  void unsafe_to_concat_from_outbuffer (unsigned int start = 0, unsigned int end = -1)
  {
    if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0))
      return;
    _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_CONCAT,
                      start, end,
                      false, true);
  }


  /* Internal methods */
  HB_NODISCARD HB_INTERNAL bool move_to (unsigned int i); /* i is output-buffer index. */

  HB_NODISCARD HB_INTERNAL bool enlarge (unsigned int size);

  HB_NODISCARD bool ensure (unsigned int size)
  { return likely (!size || size < allocated) ? true : enlarge (size); }

  HB_NODISCARD bool ensure_inplace (unsigned int size)
  { return likely (!size || size < allocated); }

  void assert_glyphs ()
  {
    assert ((content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS) ||
            (!len && (content_type == HB_BUFFER_CONTENT_TYPE_INVALID)));
  }
  void assert_unicode ()
  {
    assert ((content_type == HB_BUFFER_CONTENT_TYPE_UNICODE) ||
            (!len && (content_type == HB_BUFFER_CONTENT_TYPE_INVALID)));
  }
  HB_NODISCARD bool ensure_glyphs ()
  {
    if (unlikely (content_type != HB_BUFFER_CONTENT_TYPE_GLYPHS))
    {
      if (content_type != HB_BUFFER_CONTENT_TYPE_INVALID)
        return false;
      assert (len == 0);
      content_type = HB_BUFFER_CONTENT_TYPE_GLYPHS;
    }
    return true;
  }
  HB_NODISCARD bool ensure_unicode ()
  {
    if (unlikely (content_type != HB_BUFFER_CONTENT_TYPE_UNICODE))
    {
      if (content_type != HB_BUFFER_CONTENT_TYPE_INVALID)
        return false;
      assert (len == 0);
      content_type = HB_BUFFER_CONTENT_TYPE_UNICODE;
    }
    return true;
  }

  HB_NODISCARD HB_INTERNAL bool make_room_for (unsigned int num_in, unsigned int num_out);
  HB_NODISCARD HB_INTERNAL bool shift_forward (unsigned int count);

  typedef long scratch_buffer_t;
  HB_INTERNAL scratch_buffer_t *get_scratch_buffer (unsigned int *size);

  void clear_context (unsigned int side) { context_len[side] = 0; }

  HB_INTERNAL void sort (unsigned int start, unsigned int end, int(*compar)(const hb_glyph_info_t *, const hb_glyph_info_t *));

  bool messaging ()
  {
#ifdef HB_NO_BUFFER_MESSAGE
    return false;
#else
    return unlikely (message_func);
#endif
  }
  bool message (hb_font_t *font, const char *fmt, ...) HB_PRINTF_FUNC(3, 4)
  {
#ifdef HB_NO_BUFFER_MESSAGE
    return true;
#else
    if (likely (!messaging ()))
      return true;

    va_list ap;
    va_start (ap, fmt);
    bool ret = message_impl (font, fmt, ap);
    va_end (ap);

    return ret;
#endif
  }
  HB_INTERNAL bool message_impl (hb_font_t *font, const char *fmt, va_list ap) HB_PRINTF_FUNC(3, 0);

  static void
  set_cluster (hb_glyph_info_t &inf, unsigned int cluster, unsigned int mask = 0)
  {
    if (inf.cluster != cluster)
      inf.mask = (inf.mask & ~HB_GLYPH_FLAG_DEFINED) | (mask & HB_GLYPH_FLAG_DEFINED);
    inf.cluster = cluster;
  }
  void
  _infos_set_glyph_flags (hb_glyph_info_t *infos,
                          unsigned int start, unsigned int end,
                          unsigned int cluster,
                          hb_mask_t mask)
  {
    for (unsigned int i = start; i < end; i++)
      if (cluster != infos[i].cluster)
      {
        scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS;
        infos[i].mask |= mask;
      }
  }
  static unsigned
  _infos_find_min_cluster (const hb_glyph_info_t *infos,
                           unsigned start, unsigned end,
                           unsigned cluster = UINT_MAX)
  {
    for (unsigned int i = start; i < end; i++)
      cluster = hb_min (cluster, infos[i].cluster);
    return cluster;
  }

  void clear_glyph_flags (hb_mask_t mask = 0)
  {
    for (unsigned int i = 0; i < len; i++)
      info[i].mask = (info[i].mask & ~HB_GLYPH_FLAG_DEFINED) | (mask & HB_GLYPH_FLAG_DEFINED);
  }
};
DECLARE_NULL_INSTANCE (hb_buffer_t);


#define foreach_group(buffer, start, end, group_func) \
  for (unsigned int \
       _count = buffer->len, \
       start = 0, end = _count ? buffer->group_end (0, group_func) : 0; \
       start < _count; \
       start = end, end = buffer->group_end (start, group_func))

#define foreach_cluster(buffer, start, end) \
  foreach_group (buffer, start, end, hb_buffer_t::_cluster_group_func)


#define HB_BUFFER_XALLOCATE_VAR(b, func, var) \
  b->func (offsetof (hb_glyph_info_t, var) - offsetof(hb_glyph_info_t, var1), \
           sizeof (b->info[0].var))
#define HB_BUFFER_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, allocate_var, var ())
#define HB_BUFFER_TRY_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, try_allocate_var, var ())
#define HB_BUFFER_DEALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, deallocate_var, var ())
#define HB_BUFFER_ASSERT_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, assert_var, var ())


#endif /* HB_BUFFER_HH */
```
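For orientation: `hb_buffer_t` above is HarfBuzz's internal buffer object, which MuPDF's text shaping reaches only through the public `hb_buffer_*` / `hb_shape()` functions declared in `hb.h`. The sketch below is not part of this file; it is a minimal, illustrative round trip through that public API (the font path `DejaVuSans.ttf` is a placeholder assumption, and error handling is omitted):

```cpp
/* Illustrative only: shaping a short string through the public HarfBuzz API. */
#include <hb.h>
#include <cstdio>

int main ()
{
  hb_blob_t *blob = hb_blob_create_from_file ("DejaVuSans.ttf"); /* example path */
  hb_face_t *face = hb_face_create (blob, 0);
  hb_font_t *font = hb_font_create (face);

  /* Fill the buffer's info array (codepoints + clusters). */
  hb_buffer_t *buf = hb_buffer_create ();
  hb_buffer_add_utf8 (buf, "Hello, buffer!", -1, 0, -1);
  hb_buffer_guess_segment_properties (buf); /* script, language, direction */

  /* Shape: content type goes from Unicode to glyphs; positions get filled. */
  hb_shape (font, buf, nullptr, 0);

  unsigned int len = 0;
  hb_glyph_info_t     *info = hb_buffer_get_glyph_infos (buf, &len);
  hb_glyph_position_t *pos  = hb_buffer_get_glyph_positions (buf, &len);

  for (unsigned int i = 0; i < len; i++)
    std::printf ("glyph %u cluster %u x_advance %d\n",
                 info[i].codepoint, info[i].cluster, (int) pos[i].x_advance);

  hb_buffer_destroy (buf);
  hb_font_destroy (font);
  hb_face_destroy (face);
  hb_blob_destroy (blob);
  return 0;
}
```

Roughly speaking, adding text populates the `info` array described in the struct, shaping happens between the buffer's `enter()`/`leave()` bookkeeping ("Called around shape()" in the header), and the resulting glyphs and positions are read back from the same buffer.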
