HarfBuzz: Update to version 3.1.2

Author: bruvzg
Date: 2021-11-30 12:14:06 +02:00
parent e223a9c129
commit 371010c5f9
GPG Key ID: 7960FCF39844EC38
14 changed files with 130 additions and 59 deletions

View File

@ -201,7 +201,7 @@ Files extracted from upstream source:
## harfbuzz
- Upstream: https://github.com/harfbuzz/harfbuzz
- Version: 3.1.1 (cd5c6cd0419ac5e4de975d6c476fb760bf06d2ce, 2021)
- Version: 3.1.2 (8aed5c21a31eece6a9f3cd775fda8facb6c28b9b, 2021)
- License: MIT
Files extracted from upstream source:

View File

@ -51,13 +51,19 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
* Constructors.
*/
hb_array_t () = default;
hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_) {}
hb_array_t (const hb_array_t&) = default;
~hb_array_t () = default;
hb_array_t& operator= (const hb_array_t&) = default;
hb_array_t& operator= (hb_array_t&&) = default;
constexpr hb_array_t (std::nullptr_t) : hb_array_t () {}
constexpr hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_) {}
template <unsigned int length_>
hb_array_t (Type (&array_)[length_]) : hb_array_t (array_, length_) {}
constexpr hb_array_t (Type (&array_)[length_]) : hb_array_t (array_, length_) {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_array_t (const hb_array_t<U> &o) :
constexpr hb_array_t (const hb_array_t<U> &o) :
hb_iter_with_fallback_t<hb_array_t, Type&> (),
arrayZ (o.arrayZ), length (o.length), backwards_length (o.backwards_length) {}
template <typename U,
@ -303,13 +309,19 @@ struct hb_sorted_array_t :
static constexpr bool is_sorted_iterator = true;
hb_sorted_array_t () = default;
hb_sorted_array_t (Type *array_, unsigned int length_) : hb_array_t<Type> (array_, length_) {}
hb_sorted_array_t (const hb_sorted_array_t&) = default;
~hb_sorted_array_t () = default;
hb_sorted_array_t& operator= (const hb_sorted_array_t&) = default;
hb_sorted_array_t& operator= (hb_sorted_array_t&&) = default;
constexpr hb_sorted_array_t (std::nullptr_t) : hb_sorted_array_t () {}
constexpr hb_sorted_array_t (Type *array_, unsigned int length_) : hb_array_t<Type> (array_, length_) {}
template <unsigned int length_>
hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}
constexpr hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}
template <typename U,
hb_enable_if (hb_is_cr_convertible(U, Type))>
hb_sorted_array_t (const hb_array_t<U> &o) :
constexpr hb_sorted_array_t (const hb_array_t<U> &o) :
hb_iter_t<hb_sorted_array_t, Type&> (),
hb_array_t<Type> (o) {}
template <typename U,
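Note: the hunks above mark the pointer/length, C-array and converting constructors of hb_array_t and hb_sorted_array_t constexpr and spell out the defaulted copy/move members, so these array views can be formed in constant expressions. A minimal standalone sketch of the same pattern, assuming nothing beyond the signatures shown (span_t and its members are illustrative names, not HarfBuzz API):

    #include <cstddef>

    template <typename T>
    struct span_t
    {
      constexpr span_t () : arrayZ (nullptr), length (0) {}
      constexpr span_t (std::nullptr_t) : span_t () {}
      constexpr span_t (T *array_, unsigned length_) : arrayZ (array_), length (length_) {}
      template <unsigned N>
      constexpr span_t (T (&array_)[N]) : span_t (array_, N) {}

      span_t (const span_t &) = default;
      span_t &operator= (const span_t &) = default;

      T *arrayZ;
      unsigned length;
    };

    static constexpr int digits[] = {0, 1, 2, 3};
    static constexpr span_t<const int> all_digits (digits);           // built at compile time
    static_assert (all_digits.length == 4, "length deduced from the C array");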

View File

@ -35,8 +35,10 @@
*/
template <typename K, typename V,
K kINVALID = hb_is_pointer (K) ? 0 : std::is_signed<K>::value ? hb_int_min (K) : (K) -1,
V vINVALID = hb_is_pointer (V) ? 0 : std::is_signed<V>::value ? hb_int_min (V) : (V) -1>
typename k_invalid_t = K,
typename v_invalid_t = V,
k_invalid_t kINVALID = hb_is_pointer (K) ? 0 : std::is_signed<K>::value ? hb_int_min (K) : (K) -1,
v_invalid_t vINVALID = hb_is_pointer (V) ? 0 : std::is_signed<V>::value ? hb_int_min (V) : (V) -1>
struct hb_hashmap_t
{
static constexpr K INVALID_KEY = kINVALID;
@ -62,8 +64,10 @@ struct hb_hashmap_t
hb_copy (o, *this);
}
static_assert (std::is_integral<K>::value || hb_is_pointer (K), "");
static_assert (std::is_integral<V>::value || hb_is_pointer (V), "");
static_assert (std::is_trivially_copyable<K>::value, "");
static_assert (std::is_trivially_copyable<V>::value, "");
static_assert (std::is_trivially_destructible<K>::value, "");
static_assert (std::is_trivially_destructible<V>::value, "");
struct item_t
{
@ -347,20 +351,24 @@ struct hb_hashmap_t
*/
struct hb_map_t : hb_hashmap_t<hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
HB_MAP_VALUE_INVALID,
HB_MAP_VALUE_INVALID>
{
using hashmap = hb_hashmap_t<hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
hb_codepoint_t,
HB_MAP_VALUE_INVALID,
HB_MAP_VALUE_INVALID>;
hb_map_t () = default;
~hb_map_t () = default;
hb_map_t (hb_map_t& o) = default;
hb_map_t& operator= (const hb_map_t& other) = default;
hb_map_t& operator= (hb_map_t&& other) = default;
hb_map_t (hb_map_t&) = default;
hb_map_t& operator= (const hb_map_t&) = default;
hb_map_t& operator= (hb_map_t&&) = default;
hb_map_t (std::initializer_list<hb_pair_t<hb_codepoint_t, hb_codepoint_t>> lst) : hashmap (lst) {}
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>
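Note: hb_hashmap_t now takes the types of its invalid-key/invalid-value sentinels as separate template parameters (k_invalid_t, v_invalid_t) instead of tying them to K and V. The defaults still compute the usual sentinels, which is why later hunks can shorten hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> to hb_hashmap_t<unsigned, hb_set_t *>, and the split types let a class-type key such as hb_bytes_t use std::nullptr_t/nullptr as its sentinel (see the post-table hunk further down). A reduced sketch of the idea, with illustrative names only:

    #include <cstddef>
    #include <cstdint>
    #include <string>

    // Sentinel types are template parameters of their own, so a class-type key
    // (not usable as a non-type template parameter before C++20) can still name
    // an "invalid" marker such as nullptr.
    template <typename K, typename V,
              typename k_invalid_t, typename v_invalid_t,
              k_invalid_t kINVALID, v_invalid_t vINVALID>
    struct toy_map_t
    {
      static constexpr k_invalid_t INVALID_KEY   = kINVALID;
      static constexpr v_invalid_t INVALID_VALUE = vINVALID;
    };

    // Integral key/value: the sentinel types are simply K and V again.
    using toy_codepoint_map = toy_map_t<std::uint32_t, std::uint32_t,
                                        std::uint32_t, std::uint32_t,
                                        (std::uint32_t) -1, (std::uint32_t) -1>;

    // Class-type key: std::string cannot be a non-type template parameter, but
    // std::nullptr_t can, so nullptr stands in as the invalid-key marker.
    using toy_name_map = toy_map_t<std::string, unsigned, std::nullptr_t, unsigned,
                                   nullptr, (unsigned) -1>;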

View File

@ -38,8 +38,8 @@
*/
#define HB_OT_TAG_COLR HB_TAG('C','O','L','R')
#ifndef COLRV1_MAX_NESTING_LEVEL
#define COLRV1_MAX_NESTING_LEVEL 100
#ifndef HB_COLRV1_MAX_NESTING_LEVEL
#define HB_COLRV1_MAX_NESTING_LEVEL 100
#endif
#ifndef COLRV1_ENABLE_SUBSETTING
@ -102,7 +102,7 @@ struct hb_colrv1_closure_context_t :
hb_set_t *glyphs_,
hb_set_t *layer_indices_,
hb_set_t *palette_indices_,
unsigned nesting_level_left_ = COLRV1_MAX_NESTING_LEVEL) :
unsigned nesting_level_left_ = HB_COLRV1_MAX_NESTING_LEVEL) :
base (base_),
glyphs (glyphs_),
layer_indices (layer_indices_),
@ -1062,6 +1062,18 @@ struct ClipList
struct Paint
{
template <typename ...Ts>
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_start_recursion (HB_COLRV1_MAX_NESTING_LEVEL)))
return_trace (c->no_dispatch_return_value ());
return_trace (c->end_recursion (this->dispatch (c, std::forward<Ts> (ds)...)));
}
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
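Note: the nesting-level macro gains an HB_ prefix (still overridable through the #ifndef guard, e.g. via a -DHB_COLRV1_MAX_NESTING_LEVEL=... build flag), and the new Paint::sanitize wrapper above bounds recursion while sanitizing COLRv1 paint graphs, so a crafted font cannot drive unbounded nesting. A standalone sketch of the same guard pattern over a toy recursive structure (illustrative types, not the HarfBuzz sanitize context):

    struct toy_sanitizer_t
    {
      int recursion_depth = 0;

      bool check_start_recursion (int max_depth)
      {
        if (recursion_depth >= max_depth) return false;   // depth budget exhausted
        ++recursion_depth;
        return true;
      }
      bool end_recursion (bool result)
      {
        --recursion_depth;                                 // always unwind
        return result;
      }
    };

    struct toy_paint_t
    {
      const toy_paint_t *child = nullptr;

      bool sanitize (toy_sanitizer_t *c, int max_depth = 100) const
      {
        if (!c->check_start_recursion (max_depth))
          return false;                                    // over-deep graph: treat as invalid
        bool ok = !child || child->sanitize (c, max_depth);
        return c->end_recursion (ok);
      }
    };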

View File

@ -98,7 +98,7 @@ static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
struct hb_prune_langsys_context_t
{
hb_prune_langsys_context_t (const void *table_,
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
const hb_map_t *duplicate_feature_map_,
hb_set_t *new_collected_feature_indexes_)
:table (table_),
@ -137,7 +137,7 @@ struct hb_prune_langsys_context_t
public:
const void *table;
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
const hb_map_t *duplicate_feature_map;
hb_set_t *new_feature_indexes;
@ -179,14 +179,14 @@ struct hb_subset_layout_context_t :
hb_subset_context_t *subset_context;
const hb_tag_t table_tag;
const hb_map_t *lookup_index_map;
const hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
const hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
const hb_map_t *feature_index_map;
unsigned cur_script_index;
hb_subset_layout_context_t (hb_subset_context_t *c_,
hb_tag_t tag_,
hb_map_t *lookup_map_,
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
hb_map_t *feature_index_map_) :
subset_context (c_),
table_tag (tag_),
@ -1357,7 +1357,7 @@ struct Lookup
if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
return_trace (false);
if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
if (unlikely (get_type () == TSubTable::Extension && subtables && !c->get_edit_count ()))
{
/* The spec says all subtables of an Extension lookup should
* have the same type, which shall not be the Extension type

View File

@ -163,7 +163,7 @@ struct hb_closure_context_t :
hb_set_t *glyphs_,
hb_set_t *cur_intersected_glyphs_,
hb_map_t *done_lookups_glyph_count_,
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *done_lookups_glyph_set_,
hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set_,
unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
face (face_),
glyphs (glyphs_),
@ -192,7 +192,7 @@ struct hb_closure_context_t :
private:
hb_map_t *done_lookups_glyph_count;
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *done_lookups_glyph_set;
hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set;
unsigned int lookup_count;
};
@ -1642,9 +1642,8 @@ struct Rule
const hb_map_t *klass_map = nullptr) const
{
TRACE_SUBSET (this);
const hb_array_t<const HBUINT16> input = inputZ.as_array ((inputCount ? inputCount - 1 : 0));
if (!input.length) return_trace (false);
if (unlikely (!inputCount)) return_trace (false);
const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
if (!hb_all (input, mapping)) return_trace (false);
@ -3631,7 +3630,7 @@ struct GSUBGPOS
}
void prune_langsys (const hb_map_t *duplicate_feature_map,
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map,
hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map,
hb_set_t *new_feature_indexes /* OUT */) const
{
hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);
@ -3689,7 +3688,7 @@ struct GSUBGPOS
hb_map_t *duplicate_feature_map /* OUT */) const
{
if (feature_indices->is_empty ()) return;
hb_hashmap_t<hb_tag_t, hb_set_t *, (unsigned)-1, nullptr> unique_features;
hb_hashmap_t<hb_tag_t, hb_set_t *> unique_features;
//find out duplicate features after subset
for (unsigned i : feature_indices->iter ())
{
@ -3784,8 +3783,12 @@ struct GSUBGPOS
// http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
continue;
if (f.featureParams.is_null ()
&& !f.intersects_lookup_indexes (lookup_indices)
if (!f.featureParams.is_null () &&
tag == HB_TAG ('s', 'i', 'z', 'e'))
continue;
if (!f.intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
&& !alternate_feature_indices.has (i)
#endif
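Note: in the Rule hunk above, the empty-check is reordered so that only inputCount == 0 is rejected. A rule with inputCount == 1 has an empty extra-input array, and "every extra input glyph maps through the glyph/class mapping" is then vacuously true, so such single-glyph rules now survive subsetting instead of being dropped by the old !input.length test. A standalone illustration of that vacuous-truth point (std::all_of standing in for hb_all, toy data only):

    #include <algorithm>
    #include <cassert>
    #include <vector>

    int main ()
    {
      // A contextual rule with inputCount == 1 carries no extra input glyphs:
      // its input array is empty, so the "all inputs retained" predicate holds
      // vacuously and the rule itself must be kept.
      std::vector<unsigned> extra_input;   // inputCount - 1 == 0 entries
      bool all_retained = std::all_of (extra_input.begin (), extra_input.end (),
                                       [] (unsigned) { return true; });
      assert (all_retained);
      return 0;
    }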

View File

@ -1493,7 +1493,7 @@ hb_ot_layout_lookup_substitute_closure (hb_face_t *face,
{
hb_set_t cur_intersected_glyphs;
hb_map_t done_lookups_glyph_count;
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> done_lookups_glyph_set;
hb_hashmap_t<unsigned, hb_set_t *> done_lookups_glyph_set;
OT::hb_closure_context_t c (face, glyphs, &cur_intersected_glyphs, &done_lookups_glyph_count, &done_lookups_glyph_set);
const OT::SubstLookup& l = face->table.GSUB->table->get_lookup (lookup_index);
@ -1522,7 +1522,7 @@ hb_ot_layout_lookups_substitute_closure (hb_face_t *face,
{
hb_set_t cur_intersected_glyphs;
hb_map_t done_lookups_glyph_count;
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> done_lookups_glyph_set;
hb_hashmap_t<unsigned, hb_set_t *> done_lookups_glyph_set;
OT::hb_closure_context_t c (face, glyphs, &cur_intersected_glyphs, &done_lookups_glyph_count, &done_lookups_glyph_set);
const OT::GSUB& gsub = *face->table.GSUB->table;

View File

@ -79,6 +79,7 @@ HB_INTERNAL bool postV2Tail::subset (hb_subset_context_t *c) const
post::accelerator_t _post;
_post.init (c->plan->source);
hb_hashmap_t<hb_bytes_t, unsigned, std::nullptr_t, unsigned, nullptr, (unsigned)-1> glyph_name_to_new_index;
for (hb_codepoint_t new_gid = 0; new_gid < num_glyphs; new_gid++)
{
hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
@ -90,22 +91,28 @@ HB_INTERNAL bool postV2Tail::subset (hb_subset_context_t *c) const
else
{
hb_bytes_t s = _post.find_glyph_name (old_gid);
int standard_glyph_index = -1;
for (unsigned i = 0; i < format1_names_length; i++)
new_index = glyph_name_to_new_index.get (s);
if (new_index == (unsigned)-1)
{
if (s == format1_names (i))
int standard_glyph_index = -1;
for (unsigned i = 0; i < format1_names_length; i++)
{
standard_glyph_index = i;
break;
if (s == format1_names (i))
{
standard_glyph_index = i;
break;
}
}
if (standard_glyph_index == -1)
{
new_index = 258 + i;
i++;
}
else
{ new_index = standard_glyph_index; }
glyph_name_to_new_index.set (s, new_index);
}
if (standard_glyph_index == -1)
{
new_index = 258 + i;
i++;
}
else
{ new_index = standard_glyph_index; }
old_new_index_map.set (old_index, new_index);
}
old_gid_new_index_map.set (old_gid, new_index);
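Note: the new glyph_name_to_new_index map above memoizes the name-to-index resolution, so glyphs sharing a name reuse the index assigned the first time it was seen, and the scan over the standard format-1 name list runs once per distinct name rather than once per glyph. A toy stand-in for the same memoization (std::string and std::unordered_map in place of hb_bytes_t and the hb hashmap; names are illustrative):

    #include <algorithm>
    #include <string>
    #include <unordered_map>
    #include <vector>

    // Memoized name -> index assignment: standard names keep their slot,
    // custom names are numbered from 258, repeats reuse the cached answer.
    static unsigned assign_index (const std::string &name,
                                  const std::vector<std::string> &standard_names,
                                  std::unordered_map<std::string, unsigned> &cache,
                                  unsigned &next_custom)
    {
      auto it = cache.find (name);
      if (it != cache.end ()) return it->second;             // seen before: reuse

      unsigned index;
      auto pos = std::find (standard_names.begin (), standard_names.end (), name);
      if (pos != standard_names.end ())
        index = (unsigned) (pos - standard_names.begin ());  // standard glyph name
      else
        index = 258 + next_custom++;                         // custom names start at 258

      cache.emplace (name, index);
      return index;
    }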

View File

@ -123,6 +123,7 @@ struct hb_sanitize_context_t :
hb_sanitize_context_t () :
start (nullptr), end (nullptr),
max_ops (0), max_subtables (0),
recursion_depth (0),
writable (false), edit_count (0),
blob (nullptr),
num_glyphs (65536),
@ -205,6 +206,7 @@ struct hb_sanitize_context_t :
(unsigned) HB_SANITIZE_MAX_OPS_MAX);
this->edit_count = 0;
this->debug_depth = 0;
this->recursion_depth = 0;
DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
"start [%p..%p] (%lu bytes)",
@ -278,6 +280,18 @@ struct hb_sanitize_context_t :
return this->check_range (base, a, b, hb_static_size (T));
}
bool check_start_recursion (int max_depth)
{
if (unlikely (recursion_depth >= max_depth)) return false;
return ++recursion_depth;
}
bool end_recursion (bool result)
{
recursion_depth--;
return result;
}
template <typename Type>
bool check_struct (const Type *obj) const
{ return likely (this->check_range (obj, obj->min_size)); }
@ -389,6 +403,7 @@ struct hb_sanitize_context_t :
const char *start, *end;
mutable int max_ops, max_subtables;
private:
int recursion_depth;
bool writable;
unsigned int edit_count;
hb_blob_t *blob;
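Note: recursion_depth is zero-initialized in the constructor, reset in start_processing, and only touched through the check_start_recursion/end_recursion pair, which callers are expected to bracket strictly around a recursive dispatch (as Paint::sanitize does in the COLR hunk earlier). A hypothetical standalone check of the intended semantics, mirroring the two methods as added here:

    #include <cassert>

    struct depth_guard_t
    {
      int recursion_depth = 0;
      bool check_start_recursion (int max_depth)
      { if (recursion_depth >= max_depth) return false; return ++recursion_depth; }
      bool end_recursion (bool result)
      { recursion_depth--; return result; }
    };

    int main ()
    {
      depth_guard_t g;
      assert (g.check_start_recursion (2));    // depth 0 -> 1
      assert (g.check_start_recursion (2));    // depth 1 -> 2
      assert (!g.check_start_recursion (2));   // budget of 2 exhausted
      assert (g.end_recursion (true));         // unwinds and forwards the result
      assert (g.recursion_depth == 1);
      return 0;
    }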

View File

@ -652,7 +652,9 @@ struct hb_serialize_context_t
hb_vector_t<object_t *> packed;
/* Map view of packed objects. */
hb_hashmap_t<const object_t *, objidx_t, nullptr, 0> packed_map;
hb_hashmap_t<const object_t *, objidx_t,
const object_t *, objidx_t,
nullptr, 0> packed_map;
};
#endif /* HB_SERIALIZE_HH */

View File

@ -157,9 +157,9 @@ struct hb_set_t : hb_sparseset_t<hb_bit_set_invertible_t>
{
hb_set_t () = default;
~hb_set_t () = default;
hb_set_t (hb_set_t& o) = default;
hb_set_t& operator= (const hb_set_t& other) = default;
hb_set_t& operator= (hb_set_t&& other) = default;
hb_set_t (hb_set_t&) = default;
hb_set_t& operator= (const hb_set_t&) = default;
hb_set_t& operator= (hb_set_t&&) = default;
hb_set_t (std::initializer_list<hb_codepoint_t> lst) : hb_sparseset_t<hb_bit_set_invertible_t> (lst) {}
template <typename Iterable,
hb_requires (hb_is_iterable (Iterable))>

View File

@ -41,7 +41,7 @@
#include "hb-ot-math-table.hh"
typedef hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> script_langsys_map;
typedef hb_hashmap_t<unsigned, hb_set_t *> script_langsys_map;
#ifndef HB_NO_SUBSET_CFF
static inline void
_add_cff_seac_components (const OT::cff1::accelerator_t &cff,
@ -100,11 +100,23 @@ static void _collect_layout_indices (hb_face_t *face,
if (!features.alloc (table.get_feature_count () + 1))
return;
hb_set_t visited_features;
bool retain_all_features = true;
for (unsigned i = 0; i < table.get_feature_count (); i++)
{
hb_tag_t tag = table.get_feature_tag (i);
if (tag && layout_features_to_retain->has (tag))
features.push (tag);
if (!tag) continue;
if (!layout_features_to_retain->has (tag))
{
retain_all_features = false;
continue;
}
if (visited_features.has (tag))
continue;
features.push (tag);
visited_features.add (tag);
}
if (!features)
@ -113,7 +125,7 @@ static void _collect_layout_indices (hb_face_t *face,
// The collect function needs a null element to signal end of the array.
features.push (0);
if (features.get_size () == table.get_feature_count () + 1)
if (retain_all_features)
{
// Looking for all features, trigger the faster collection method.
layout_collect_func (face,
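Note: _collect_layout_indices now deduplicates feature tags via visited_features and tracks an explicit retain_all_features flag (cleared as soon as any tag is filtered out) instead of comparing the pushed count against the table's feature count. A reduced sketch of the same filtering over a toy tag list (illustrative helper, not HarfBuzz API):

    #include <cstdint>
    #include <unordered_set>
    #include <vector>

    // Dedupe tags and record whether every non-zero tag in the table was kept.
    static std::vector<std::uint32_t>
    collect_tags (const std::vector<std::uint32_t> &table_tags,
                  const std::unordered_set<std::uint32_t> &tags_to_retain,
                  bool &retain_all_features)
    {
      std::vector<std::uint32_t> features;
      std::unordered_set<std::uint32_t> visited;
      retain_all_features = true;

      for (std::uint32_t tag : table_tags)
      {
        if (!tag) continue;                              // empty slot
        if (!tags_to_retain.count (tag))
        { retain_all_features = false; continue; }       // this tag is dropped
        if (visited.count (tag)) continue;               // already recorded
        features.push_back (tag);
        visited.insert (tag);
      }
      return features;
    }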

View File

@ -84,8 +84,8 @@ struct hb_subset_plan_t
hb_map_t *gpos_lookups;
//active langsys we'd like to retain
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *gsub_langsys;
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *gpos_langsys;
hb_hashmap_t<unsigned, hb_set_t *> *gsub_langsys;
hb_hashmap_t<unsigned, hb_set_t *> *gpos_langsys;
//active features after removing redundant langsys and prune_features
hb_map_t *gsub_features;

View File

@ -53,14 +53,14 @@ HB_BEGIN_DECLS
*
* The micro component of the library version available at compile-time.
*/
#define HB_VERSION_MICRO 1
#define HB_VERSION_MICRO 2
/**
* HB_VERSION_STRING:
*
* A string literal containing the library version available at compile-time.
*/
#define HB_VERSION_STRING "3.1.1"
#define HB_VERSION_STRING "3.1.2"
/**
* HB_VERSION_ATLEAST: