Add MVAR partial-instancing support to the subsetter
Introduce an item_variations_t helper that decompiles, instances, optimizes and re-serializes the Item Variation Store, and enable subsetting/instancing of the MVAR table
diff --git a/src/Makefile.am b/src/Makefile.am
index bc2a3de..ff6a6d6 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -458,6 +458,7 @@
test-classdef-graph \
test-instancer-solver \
test-tuple-varstore \
+ test-item-varstore \
$(NULL)
COMPILED_TESTS_CPPFLAGS = $(HBCFLAGS) -DMAIN -UNDEBUG
COMPILED_TESTS_LDADD = libharfbuzz.la $(HBLIBS)
@@ -536,6 +537,10 @@
test_tuple_varstore_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
test_tuple_varstore_LDADD = $(COMPILED_TESTS_LDADD)
+test_item_varstore_SOURCES = test-item-varstore.cc hb-subset-instancer-solver.cc hb-static.cc
+test_item_varstore_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
+test_item_varstore_LDADD = $(COMPILED_TESTS_LDADD)
+
dist_check_SCRIPTS = \
check-c-linkage-decls.py \
check-externs.py \
diff --git a/src/hb-ot-layout-common.hh b/src/hb-ot-layout-common.hh
index 2cc5098..4a9f157 100644
--- a/src/hb-ot-layout-common.hh
+++ b/src/hb-ot-layout-common.hh
@@ -2300,6 +2300,156 @@
* Item Variation Store
*/
+/* ported from fonttools (class _Encoding) */
+struct delta_row_encoding_t
+{
+ /* each byte represents a region; the value is one of 0/1/2/4, the number of
+ * bytes needed to store this region's delta */
+ hb_vector_t<uint8_t> chars;
+ unsigned width;
+ hb_vector_t<uint8_t> columns;
+ unsigned overhead;
+ hb_vector_t<const hb_vector_t<int>*> items;
+
+ delta_row_encoding_t () = default;
+ delta_row_encoding_t (hb_vector_t<uint8_t>&& chars_,
+ const hb_vector_t<int>* row = nullptr) :
+ delta_row_encoding_t ()
+
+ {
+ chars = chars_;
+ width = get_width ();
+ columns = get_columns ();
+ overhead = get_chars_overhead (columns);
+ if (row) items.push (row);
+ }
+
+ bool is_empty () const
+ { return !items; }
+
+ static hb_vector_t<uint8_t> get_row_chars (const hb_vector_t<int>& row)
+ {
+ hb_vector_t<uint8_t> ret;
+ if (!ret.alloc (row.length)) return ret;
+
+ bool long_words = false;
+
+ /* 0/1/2 byte encoding */
+ for (int v: row)
+ {
+ if (v == 0)
+ ret.push (0);
+ else if (v > 32767 || v < -32768)
+ {
+ long_words = true;
+ break;
+ }
+ else if (v > 127 || v < -128)
+ ret.push (2);
+ else
+ ret.push (1);
+ }
+
+ if (!long_words)
+ return ret;
+
+ /* redo, 0/2/4 bytes encoding */
+ ret.reset ();
+ for (int v: row)
+ {
+ if (v == 0)
+ ret.push (0);
+ else if (v > 32767 || v < -32768)
+ ret.push (4);
+ else
+ ret.push (2);
+ }
+ return ret;
+ }
+
+ inline unsigned get_width ()
+ {
+ unsigned ret = + hb_iter (chars)
+ | hb_reduce (hb_add, 0u)
+ ;
+ return ret;
+ }
+
+ hb_vector_t<uint8_t> get_columns ()
+ {
+ hb_vector_t<uint8_t> cols;
+ cols.alloc (chars.length);
+ for (auto v : chars)
+ {
+ uint8_t flag = v ? 1 : 0;
+ cols.push (flag);
+ }
+ return cols;
+ }
+
+ static inline unsigned get_chars_overhead (const hb_vector_t<uint8_t>& cols)
+ {
+ unsigned c = 4 + 6; // 4 bytes for LOffset, 6 bytes for VarData header
+ unsigned cols_bit_count = 0;
+ for (auto v : cols)
+ if (v) cols_bit_count++;
+ return c + cols_bit_count * 2;
+ }
+
+ unsigned get_gain () const
+ {
+ int count = items.length;
+ return hb_max (0, (int) overhead - count);
+ }
+
+ int gain_from_merging (const delta_row_encoding_t& other_encoding) const
+ {
+ int combined_width = 0;
+ for (unsigned i = 0; i < chars.length; i++)
+ combined_width += hb_max (chars.arrayZ[i], other_encoding.chars.arrayZ[i]);
+
+ hb_vector_t<uint8_t> combined_columns;
+ combined_columns.alloc (columns.length);
+ for (unsigned i = 0; i < columns.length; i++)
+ combined_columns.push (columns.arrayZ[i] | other_encoding.columns.arrayZ[i]);
+
+ int combined_overhead = get_chars_overhead (combined_columns);
+ int combined_gain = (int) overhead + (int) other_encoding.overhead - combined_overhead
+ - (combined_width - (int) width) * items.length
+ - (combined_width - (int) other_encoding.width) * other_encoding.items.length;
+
+ return combined_gain;
+ }
+
+ static int cmp (const void *pa, const void *pb)
+ {
+ const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
+ const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;
+
+ int gain_a = a->get_gain ();
+ int gain_b = b->get_gain ();
+
+ if (gain_a != gain_b)
+ return gain_a - gain_b;
+
+ return (b->chars).as_array ().cmp ((a->chars).as_array ());
+ }
+
+ static int cmp_width (const void *pa, const void *pb)
+ {
+ const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
+ const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;
+
+ if (a->width != b->width)
+ return (int) a->width - (int) b->width;
+
+ return (b->chars).as_array ().cmp ((a->chars).as_array ());
+ }
+
+ bool add_row (const hb_vector_t<int>* row)
+ { return items.push (row); }
+};
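
The chars widths above are easiest to see in isolation. Below is a small standalone sketch (plain std::vector in place of hb_vector_t; names are illustrative only) of the same classification rule as get_row_chars (): each region gets the byte width its delta needs, and a single out-of-range value switches the whole row from 0/1/2 to 0/2/4 encoding.

#include <cstdint>
#include <cstdio>
#include <vector>

static std::vector<uint8_t> classify_row (const std::vector<int>& row)
{
  bool long_words = false;
  for (int v : row)
    if (v > 32767 || v < -32768) { long_words = true; break; }

  std::vector<uint8_t> chars;
  chars.reserve (row.size ());
  for (int v : row)
  {
    if (v == 0)          chars.push_back (0);
    else if (long_words) chars.push_back ((v > 32767 || v < -32768) ? 4 : 2);
    else                 chars.push_back ((v > 127 || v < -128) ? 2 : 1);
  }
  return chars;
}

int main ()
{
  /* 0 -> unused region, 5 -> 1 byte, -300 and 20000 -> 2 bytes */
  for (uint8_t c : classify_row ({0, 5, -300, 20000}))
    printf ("%u ", (unsigned) c);   /* prints: 0 1 2 2 */
  printf ("\n");
  return 0;
}
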
+
struct VarRegionAxis
{
float evaluate (int coord) const
@@ -2334,6 +2484,12 @@
* have to do that at runtime. */
}
+ bool serialize (hb_serialize_context_t *c) const
+ {
+ TRACE_SERIALIZE (this);
+ return_trace (c->embed (this));
+ }
+
public:
F2DOT14 startCoord;
F2DOT14 peakCoord;
@@ -2391,6 +2547,47 @@
return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
}
+ bool serialize (hb_serialize_context_t *c,
+ const hb_vector_t<hb_tag_t>& axis_tags,
+ const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& regions)
+ {
+ TRACE_SERIALIZE (this);
+ unsigned axis_count = axis_tags.length;
+ unsigned region_count = regions.length;
+ if (!axis_count || !region_count) return_trace (false);
+ if (unlikely (hb_unsigned_mul_overflows (axis_count * region_count,
+ VarRegionAxis::static_size))) return_trace (false);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ axisCount = axis_count;
+ regionCount = region_count;
+
+ for (unsigned r = 0; r < region_count; r++)
+ {
+ const auto& region = regions[r];
+ for (unsigned i = 0; i < axis_count; i++)
+ {
+ hb_tag_t tag = axis_tags.arrayZ[i];
+ VarRegionAxis var_region_rec;
+ Triple *coords;
+ if (region->has (tag, &coords))
+ {
+ var_region_rec.startCoord.set_float (coords->minimum);
+ var_region_rec.peakCoord.set_float (coords->middle);
+ var_region_rec.endCoord.set_float (coords->maximum);
+ }
+ else
+ {
+ var_region_rec.startCoord.set_int (0);
+ var_region_rec.peakCoord.set_int (0);
+ var_region_rec.endCoord.set_int (0);
+ }
+ if (!var_region_rec.serialize (c))
+ return_trace (false);
+ }
+ }
+ return_trace (true);
+ }
+
bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t ®ion_map)
{
TRACE_SERIALIZE (this);
@@ -2411,6 +2608,45 @@
return_trace (true);
}
+ bool get_var_region (unsigned region_index,
+ const hb_map_t& axes_old_index_tag_map,
+ hb_hashmap_t<hb_tag_t, Triple>& axis_tuples /* OUT */) const
+ {
+ if (region_index >= regionCount) return false;
+ const VarRegionAxis* axis_region = axesZ.arrayZ + (region_index * axisCount);
+ for (unsigned i = 0; i < axisCount; i++)
+ {
+ hb_tag_t *axis_tag;
+ if (!axes_old_index_tag_map.has (i, &axis_tag))
+ return false;
+
+ float min_val = axis_region->startCoord.to_float ();
+ float def_val = axis_region->peakCoord.to_float ();
+ float max_val = axis_region->endCoord.to_float ();
+
+ if (def_val != 0.f)
+ axis_tuples.set (*axis_tag, Triple (min_val, def_val, max_val));
+ axis_region++;
+ }
+ return !axis_tuples.in_error ();
+ }
+
+ bool get_var_regions (const hb_map_t& axes_old_index_tag_map,
+ hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions /* OUT */) const
+ {
+ if (!regions.alloc (regionCount))
+ return false;
+
+ for (unsigned i = 0; i < regionCount; i++)
+ {
+ hb_hashmap_t<hb_tag_t, Triple> axis_tuples;
+ if (!get_var_region (i, axes_old_index_tag_map, axis_tuples))
+ return false;
+ regions.push (std::move (axis_tuples));
+ }
+ return !regions.in_error ();
+ }
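
As a self-contained illustration of what get_var_region () produces (ordinary std::map/std::array stand in for the HarfBuzz containers, values are made up, and F2DOT14 coordinates are converted with a /16384 scale): only axes whose peak coordinate is non-zero end up in the axis_tag -> Triple map.

#include <array>
#include <cstdint>
#include <cstdio>
#include <map>

int main ()
{
  const uint32_t wght = ('w'<<24)|('g'<<16)|('h'<<8)|'t';
  const uint32_t opsz = ('o'<<24)|('p'<<16)|('s'<<8)|'z';

  /* raw F2DOT14 {start, peak, end} coordinates of one region, per axis */
  std::map<uint32_t, std::array<int16_t, 3>> raw = {
    { wght, { -16384, -16384, 0 } },  /* i.e. [-1, -1, 0] */
    { opsz, { 0, 0, 0 } },            /* peak 0: axis doesn't constrain this region */
  };

  std::map<uint32_t, std::array<float, 3>> axis_tuples;
  for (auto& it : raw)
  {
    float peak = it.second[1] / 16384.f;
    if (peak == 0.f) continue;        /* skipped, like def_val == 0 above */
    axis_tuples[it.first] = { it.second[0] / 16384.f, peak, it.second[2] / 16384.f };
  }
  printf ("kept %zu axis tuple(s)\n", axis_tuples.size ());   /* kept 1 axis tuple(s) */
  return 0;
}
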
+
unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
public:
@@ -2430,6 +2666,9 @@
unsigned int get_region_index_count () const
{ return regionIndices.len; }
+
+ unsigned get_region_index (unsigned i) const
+ { return i >= regionIndices.len ? -1 : regionIndices[i]; }
unsigned int get_row_size () const
{ return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); }
@@ -2506,6 +2745,81 @@
}
bool serialize (hb_serialize_context_t *c,
+ bool has_long,
+ const hb_vector_t<const hb_vector_t<int>*>& rows)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ unsigned row_count = rows.length;
+ itemCount = row_count;
+
+ int min_threshold = has_long ? -65536 : -128;
+ int max_threshold = has_long ? +65535 : +127;
+ enum delta_size_t { kZero=0, kNonWord, kWord };
+ hb_vector_t<delta_size_t> delta_sz;
+ unsigned num_regions = rows[0]->length;
+ if (!delta_sz.resize (num_regions))
+ return_trace (false);
+
+ unsigned word_count = 0;
+ for (unsigned r = 0; r < num_regions; r++)
+ {
+ for (unsigned i = 0; i < row_count; i++)
+ {
+ int delta = rows[i]->arrayZ[r];
+ if (delta < min_threshold || delta > max_threshold)
+ {
+ delta_sz[r] = kWord;
+ word_count++;
+ break;
+ }
+ else if (delta != 0)
+ {
+ delta_sz[r] = kNonWord;
+ }
+ }
+ }
+
+ /* reorder regions: words first, then non-words */
+ unsigned word_index = 0;
+ unsigned non_word_index = word_count;
+ hb_map_t ri_map;
+ for (unsigned r = 0; r < num_regions; r++)
+ {
+ if (!delta_sz[r]) continue;
+ unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
+ if (!ri_map.set (new_r, r))
+ return_trace (false);
+ }
+
+ wordSizeCount = word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);
+
+ unsigned ri_count = ri_map.get_population ();
+ regionIndices.len = ri_count;
+ if (unlikely (!c->extend (this))) return_trace (false);
+
+ for (unsigned r = 0; r < ri_count; r++)
+ {
+ hb_codepoint_t *idx;
+ if (!ri_map.has (r, &idx))
+ return_trace (false);
+ regionIndices[r] = *idx;
+ }
+
+ HBUINT8 *delta_bytes = get_delta_bytes ();
+ unsigned row_size = get_row_size ();
+ for (unsigned int i = 0; i < row_count; i++)
+ {
+ for (unsigned int r = 0; r < ri_count; r++)
+ {
+ int delta = rows[i]->arrayZ[ri_map[r]];
+ set_item_delta_fast (i, r, delta, delta_bytes, row_size);
+ }
+ }
+ return_trace (true);
+ }
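
A minimal sketch of the column handling this serializer performs (made-up column classification, std::map in place of hb_map_t): all-zero columns are dropped, "word" columns are placed first, and ri_map records which original region each retained column came from.

#include <cstdio>
#include <map>
#include <vector>

enum delta_size_t { kZero = 0, kNonWord, kWord };

int main ()
{
  std::vector<delta_size_t> delta_sz = { kNonWord, kZero, kWord, kNonWord, kWord };

  unsigned word_count = 0;
  for (auto d : delta_sz) if (d == kWord) word_count++;

  unsigned word_index = 0, non_word_index = word_count;
  std::map<unsigned, unsigned> ri_map;   /* new column index -> old region index */
  for (unsigned r = 0; r < delta_sz.size (); r++)
  {
    if (delta_sz[r] == kZero) continue;  /* unused column, pruned */
    unsigned new_r = (delta_sz[r] == kWord) ? word_index++ : non_word_index++;
    ri_map[new_r] = r;
  }

  for (auto& p : ri_map)
    printf ("column %u <- region %u\n", p.first, p.second);
  /* columns 0,1 <- regions 2,4 (words first); columns 2,3 <- regions 0,3 */
  return 0;
}
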
+
+ bool serialize (hb_serialize_context_t *c,
const VarData *src,
const hb_inc_bimap_t &inner_map,
const hb_inc_bimap_t ®ion_map)
@@ -2625,13 +2939,15 @@
}
}
- protected:
+ public:
const HBUINT8 *get_delta_bytes () const
{ return &StructAfter<HBUINT8> (regionIndices); }
+ protected:
HBUINT8 *get_delta_bytes ()
{ return &StructAfter<HBUINT8> (regionIndices); }
+ public:
int32_t get_item_delta_fast (unsigned int item, unsigned int region,
const HBUINT8 *delta_bytes, unsigned row_size) const
{
@@ -2662,6 +2978,7 @@
get_row_size ());
}
+ protected:
void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
HBUINT8 *delta_bytes, unsigned row_size)
{
@@ -2704,6 +3021,7 @@
struct VariationStore
{
+ friend struct item_variations_t;
using cache_t = VarRegionList::cache_t;
cache_t *create_cache () const
@@ -2775,6 +3093,36 @@
}
bool serialize (hb_serialize_context_t *c,
+ bool has_long,
+ const hb_vector_t<hb_tag_t>& axis_tags,
+ const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& region_list,
+ const hb_vector_t<delta_row_encoding_t>& vardata_encodings)
+ {
+ TRACE_SERIALIZE (this);
+#ifdef HB_NO_VAR
+ return_trace (false);
+#endif
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+
+ format = 1;
+ if (!regions.serialize_serialize (c, axis_tags, region_list))
+ return_trace (false);
+
+ unsigned num_var_data = vardata_encodings.length;
+ if (!num_var_data) return_trace (false);
+ if (unlikely (!c->check_assign (dataSets.len, num_var_data,
+ HB_SERIALIZE_ERROR_INT_OVERFLOW)))
+ return_trace (false);
+
+ if (unlikely (!c->extend (dataSets))) return_trace (false);
+ for (unsigned i = 0; i < num_var_data; i++)
+ if (!dataSets[i].serialize_serialize (c, has_long, vardata_encodings[i].items))
+ return_trace (false);
+
+ return_trace (true);
+ }
+
+ bool serialize (hb_serialize_context_t *c,
const VariationStore *src,
const hb_array_t <const hb_inc_bimap_t> &inner_maps)
{
@@ -2903,6 +3251,22 @@
return dataSets.len;
}
+ const VarData& get_sub_table (unsigned i) const
+ {
+#ifdef HB_NO_VAR
+ return Null (VarData);
+#endif
+ return this+dataSets[i];
+ }
+
+ const VarRegionList& get_region_list () const
+ {
+#ifdef HB_NO_VAR
+ return Null (VarRegionList);
+#endif
+ return this+regions;
+ }
+
protected:
HBUINT16 format;
Offset32To<VarRegionList> regions;
diff --git a/src/hb-ot-os2-table.hh b/src/hb-ot-os2-table.hh
index 72cb662..19330b9 100644
--- a/src/hb-ot-os2-table.hh
+++ b/src/hb-ot-os2-table.hh
@@ -246,24 +246,19 @@
}
#endif
- if (c->plan->user_axes_location.has (HB_TAG ('w','g','h','t')) &&
- !c->plan->pinned_at_default)
+ Triple *axis_range;
+ if (c->plan->user_axes_location.has (HB_TAG ('w','g','h','t'), &axis_range))
{
- float weight_class = c->plan->user_axes_location.get (HB_TAG ('w','g','h','t')).middle;
- if (!c->serializer->check_assign (os2_prime->usWeightClass,
- roundf (hb_clamp (weight_class, 1.0f, 1000.0f)),
- HB_SERIALIZE_ERROR_INT_OVERFLOW))
- return_trace (false);
+ unsigned weight_class = static_cast<unsigned> (roundf (hb_clamp (axis_range->middle, 1.0f, 1000.0f)));
+ if (os2_prime->usWeightClass != weight_class)
+ os2_prime->usWeightClass = weight_class;
}
- if (c->plan->user_axes_location.has (HB_TAG ('w','d','t','h')) &&
- !c->plan->pinned_at_default)
+ if (c->plan->user_axes_location.has (HB_TAG ('w','d','t','h'), &axis_range))
{
- float width = c->plan->user_axes_location.get (HB_TAG ('w','d','t','h')).middle;
- if (!c->serializer->check_assign (os2_prime->usWidthClass,
- roundf (map_wdth_to_widthclass (width)),
- HB_SERIALIZE_ERROR_INT_OVERFLOW))
- return_trace (false);
+ unsigned width_class = static_cast<unsigned> (roundf (map_wdth_to_widthclass (axis_range->middle)));
+ if (os2_prime->usWidthClass != width_class)
+ os2_prime->usWidthClass = width_class;
}
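
For illustration, a tiny standalone equivalent of the usWeightClass computation above (the wght value is a made-up example): the new default ("middle") of the retained axis range is clamped to [1, 1000] and rounded to an integer class.

#include <algorithm>
#include <cstdio>
#include <math.h>

int main ()
{
  float wght_default = 312.6f;   /* hypothetical new wght default after instancing */
  unsigned weight_class =
      (unsigned) roundf (std::min (std::max (wght_default, 1.0f), 1000.0f));
  printf ("usWeightClass = %u\n", weight_class);   /* 313 */
  return 0;
}
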
if (c->plan->flags & HB_SUBSET_FLAGS_NO_PRUNE_UNICODE_RANGES)
diff --git a/src/hb-ot-post-table.hh b/src/hb-ot-post-table.hh
index 761e49d..aaecc34 100644
--- a/src/hb-ot-post-table.hh
+++ b/src/hb-ot-post-table.hh
@@ -113,12 +113,12 @@
}
#endif
- if (c->plan->user_axes_location.has (HB_TAG ('s','l','n','t')) &&
- !c->plan->pinned_at_default)
+ Triple *axis_range;
+ if (c->plan->user_axes_location.has (HB_TAG ('s','l','n','t'), &axis_range))
{
- float italic_angle = c->plan->user_axes_location.get (HB_TAG ('s','l','n','t')).middle;
- italic_angle = hb_max (-90.f, hb_min (italic_angle, 90.f));
- post_prime->italicAngle.set_float (italic_angle);
+ float italic_angle = hb_max (-90.f, hb_min (axis_range->middle, 90.f));
+ if (post_prime->italicAngle.to_float () != italic_angle)
+ post_prime->italicAngle.set_float (italic_angle);
}
if (glyph_names && version.major == 2)
diff --git a/src/hb-ot-var-common.hh b/src/hb-ot-var-common.hh
index 23d7f7c..aeb0097 100644
--- a/src/hb-ot-var-common.hh
+++ b/src/hb-ot-var-common.hh
@@ -27,6 +27,7 @@
#define HB_OT_VAR_COMMON_HH
#include "hb-ot-layout-common.hh"
+#include "hb-priority-queue.hh"
namespace OT {
@@ -1171,14 +1172,69 @@
return true;
}
+ bool create_from_item_var_data (const VarData &var_data,
+ const hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions,
+ const hb_map_t& axes_old_index_tag_map)
+ {
+ /* NULL offset, to keep original varidx valid, just return */
+ if (&var_data == &Null (VarData))
+ return true;
+
+ unsigned num_regions = var_data.get_region_index_count ();
+ if (!tuple_vars.alloc (num_regions)) return false;
+
+ unsigned item_count = var_data.get_item_count ();
+ unsigned row_size = var_data.get_row_size ();
+ const HBUINT8 *delta_bytes = var_data.get_delta_bytes ();
+
+ for (unsigned r = 0; r < num_regions; r++)
+ {
+ /* In VarData, deltas are organized in rows; convert them into
+ * column (region) based tuples, resizing deltas_x first */
+ tuple_delta_t tuple;
+ if (!tuple.deltas_x.resize (item_count, false) ||
+ !tuple.indices.resize (item_count, false))
+ return false;
+
+ for (unsigned i = 0; i < item_count; i++)
+ {
+ tuple.indices.arrayZ[i] = true;
+ tuple.deltas_x.arrayZ[i] = var_data.get_item_delta_fast (i, r, delta_bytes, row_size);
+ }
+
+ unsigned region_index = var_data.get_region_index (r);
+ if (region_index >= regions.length) return false;
+ tuple.axis_tuples = regions.arrayZ[region_index];
+
+ tuple_vars.push (std::move (tuple));
+ }
+ return !tuple_vars.in_error ();
+ }
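
The conversion above is essentially a transpose; a minimal sketch with plain std::vector and made-up deltas: VarData stores one delta row per item, while each tuple_delta_t keeps one delta column per region.

#include <cstdio>
#include <vector>

int main ()
{
  /* rows[item][region], as laid out in a VarData subtable */
  std::vector<std::vector<int>> rows = { { 10, 0, -3 },
                                         { 12, 5,  0 } };

  unsigned item_count = rows.size (), region_count = rows[0].size ();
  /* columns[region][item], as kept in tuple_delta_t::deltas_x */
  std::vector<std::vector<int>> columns (region_count, std::vector<int> (item_count));
  for (unsigned r = 0; r < region_count; r++)
    for (unsigned i = 0; i < item_count; i++)
      columns[r][i] = rows[i][r];

  for (unsigned r = 0; r < region_count; r++)
    printf ("region %u: %d %d\n", r, columns[r][0], columns[r][1]);
  return 0;
}
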
+
private:
- void change_tuple_variations_axis_limits (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
+ static int _cmp_axis_tag (const void *pa, const void *pb)
+ {
+ const hb_tag_t *a = (const hb_tag_t*) pa;
+ const hb_tag_t *b = (const hb_tag_t*) pb;
+ return (int)(*a) - (int)(*b);
+ }
+
+ bool change_tuple_variations_axis_limits (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances)
{
- for (auto _ : normalized_axes_location)
+ /* sort axis_tag/axis_limits, make result deterministic */
+ hb_vector_t<hb_tag_t> axis_tags;
+ if (!axis_tags.alloc (normalized_axes_location.get_population ()))
+ return false;
+ for (auto t : normalized_axes_location.keys ())
+ axis_tags.push (t);
+
+ axis_tags.qsort (_cmp_axis_tag);
+ for (auto axis_tag : axis_tags)
{
- hb_tag_t axis_tag = _.first;
- Triple axis_limit = _.second;
+ Triple *axis_limit;
+ if (!normalized_axes_location.has (axis_tag, &axis_limit))
+ return false;
TripleDistances axis_triple_distances{1.f, 1.f};
if (axes_triple_distances.has (axis_tag))
axis_triple_distances = axes_triple_distances.get (axis_tag);
@@ -1186,12 +1242,13 @@
hb_vector_t<tuple_delta_t> new_vars;
for (const tuple_delta_t& var : tuple_vars)
{
- hb_vector_t<tuple_delta_t> out = var.change_tuple_var_axis_limit (axis_tag, axis_limit, axis_triple_distances);
+ hb_vector_t<tuple_delta_t> out = var.change_tuple_var_axis_limit (axis_tag, *axis_limit, axis_triple_distances);
if (!out) continue;
+
unsigned new_len = new_vars.length + out.length;
if (unlikely (!new_vars.alloc (new_len, false)))
- { fini (); return;}
+ { fini (); return false;}
for (unsigned i = 0; i < out.length; i++)
new_vars.push (std::move (out[i]));
@@ -1199,6 +1256,7 @@
tuple_vars.fini ();
tuple_vars = std::move (new_vars);
}
+ return true;
}
/* merge tuple variations with overlapping tents */
@@ -1382,7 +1440,8 @@
contour_point_vector_t* contour_points = nullptr)
{
if (!tuple_vars) return true;
- change_tuple_variations_axis_limits (normalized_axes_location, axes_triple_distances);
+ if (!change_tuple_variations_axis_limits (normalized_axes_location, axes_triple_distances))
+ return false;
/* compute inferred deltas only for gvar */
if (contour_points)
if (!calc_inferred_deltas (*contour_points))
@@ -1705,6 +1764,394 @@
DEFINE_SIZE_MIN (4);
};
+using tuple_variations_t = TupleVariationData::tuple_variations_t;
+struct item_variations_t
+{
+ using region_t = const hb_hashmap_t<hb_tag_t, Triple>*;
+ private:
+ /* each subtable is decompiled into a tuple_variations_t, in which all tuples
+ * have the same number of deltas (rows) */
+ hb_vector_t<tuple_variations_t> vars;
+
+ /* original region list, decompiled from item varstore, used when rebuilding
+ * region list after instantiation */
+ hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>> orig_region_list;
+
+ /* region list: vector of regions, maintaining the original order for regions
+ * that existed before instantiate () and appending new regions at the end.
+ * Regions are already stored in each tuple, so only pointers are saved here.
+ * When converting back to an item varstore, unused regions will be pruned */
+ hb_vector_t<region_t> region_list;
+
+ /* region -> idx map after instantiation and pruning unused regions */
+ hb_hashmap_t<region_t, unsigned> region_map;
+
+ /* all delta rows after instantiation */
+ hb_vector_t<hb_vector_t<int>> delta_rows;
+ /* final optimized vector of encoding objects used to assemble the varstore */
+ hb_vector_t<delta_row_encoding_t> encodings;
+
+ /* old varidx -> new varidx map */
+ hb_map_t varidx_map;
+
+ /* has long words */
+ bool has_long = false;
+
+ public:
+ bool has_long_word () const
+ { return has_long; }
+
+ const hb_vector_t<region_t>& get_region_list () const
+ { return region_list; }
+
+ const hb_vector_t<delta_row_encoding_t>& get_vardata_encodings () const
+ { return encodings; }
+
+ const hb_map_t& get_varidx_map () const
+ { return varidx_map; }
+
+ bool create_from_item_varstore (const VariationStore& varStore,
+ const hb_map_t& axes_old_index_tag_map)
+ {
+ const VarRegionList& regionList = varStore.get_region_list ();
+ if (!regionList.get_var_regions (axes_old_index_tag_map, orig_region_list))
+ return false;
+
+ unsigned num_var_data = varStore.get_sub_table_count ();
+ if (!vars.alloc (num_var_data)) return false;
+
+ for (unsigned i = 0; i < num_var_data; i++)
+ {
+ tuple_variations_t var_data_tuples;
+ if (!var_data_tuples.create_from_item_var_data (varStore.get_sub_table (i),
+ orig_region_list,
+ axes_old_index_tag_map))
+ return false;
+
+ vars.push (std::move (var_data_tuples));
+ }
+ return !vars.in_error ();
+ }
+
+ bool instantiate (const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location,
+ const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances)
+ {
+ for (tuple_variations_t& tuple_vars : vars)
+ if (!tuple_vars.instantiate (normalized_axes_location, axes_triple_distances))
+ return false;
+
+ if (!build_region_list ()) return false;
+ return true;
+ }
+
+ bool build_region_list ()
+ {
+ /* scan all tuples and collect all unique regions, prune unused regions */
+ hb_hashmap_t<region_t, unsigned> all_regions;
+ hb_hashmap_t<region_t, unsigned> used_regions;
+
+ /* use a vector when inserting new regions, make result deterministic */
+ hb_vector_t<region_t> all_unique_regions;
+ for (const tuple_variations_t& sub_table : vars)
+ {
+ for (const tuple_delta_t& tuple : sub_table.tuple_vars)
+ {
+ region_t r = &(tuple.axis_tuples);
+ if (!used_regions.has (r))
+ {
+ bool all_zeros = true;
+ for (float d : tuple.deltas_x)
+ {
+ int delta = (int) roundf (d);
+ if (delta != 0)
+ {
+ all_zeros = false;
+ break;
+ }
+ }
+ if (!all_zeros)
+ {
+ if (!used_regions.set (r, 1))
+ return false;
+ }
+ }
+ if (all_regions.has (r))
+ continue;
+ if (!all_regions.set (r, 1))
+ return false;
+ all_unique_regions.push (r);
+ }
+ }
+
+ if (!all_regions || !all_unique_regions) return false;
+ if (!region_list.alloc (all_regions.get_population ()))
+ return false;
+
+ unsigned idx = 0;
+ /* append the original regions that pre-existed */
+ for (const auto& r : orig_region_list)
+ {
+ if (!all_regions.has (&r) || !used_regions.has (&r))
+ continue;
+
+ region_list.push (&r);
+ if (!region_map.set (&r, idx))
+ return false;
+ all_regions.del (&r);
+ idx++;
+ }
+
+ /* append the new regions at the end */
+ for (const auto& r: all_unique_regions)
+ {
+ if (!all_regions.has (r) || !used_regions.has (r))
+ continue;
+ region_list.push (r);
+ if (!region_map.set (r, idx))
+ return false;
+ all_regions.del (r);
+ idx++;
+ }
+ return (!region_list.in_error ()) && (!region_map.in_error ());
+ }
+
+ /* main algorithm ported from fonttools VarStore_optimize() method */
+ bool optimize (bool use_no_variation_idx=true)
+ {
+ unsigned num_cols = region_list.length;
+ /* pre-allocate a 2D vector for all subtables' VarData rows */
+ unsigned total_rows = 0;
+ for (unsigned major = 0; major < vars.length; major++)
+ {
+ const tuple_variations_t& tuples = vars[major];
+ /* all tuples in each subtable should have the same number of deltas (rows) */
+ total_rows += tuples.tuple_vars[0].deltas_x.length;
+ }
+
+ if (!delta_rows.resize (total_rows)) return false;
+ /* init all rows to [0]*num_cols */
+ for (unsigned i = 0; i < total_rows; i++)
+ if (!(delta_rows[i].resize (num_cols))) return false;
+
+ /* old VarIdxes -> full encoding_row mapping */
+ hb_hashmap_t<unsigned, const hb_vector_t<int>*> front_mapping;
+ unsigned start_row = 0;
+ hb_vector_t<delta_row_encoding_t> encoding_objs;
+ hb_hashmap_t<hb_vector_t<uint8_t>, unsigned> chars_idx_map;
+
+ /* delta_rows map, used for filtering out duplicate rows */
+ hb_hashmap_t<const hb_vector_t<int>*, unsigned> delta_rows_map;
+ for (unsigned major = 0; major < vars.length; major++)
+ {
+ /* deltas are stored in tuples (column based); convert them back into item
+ * (row based) deltas */
+ const tuple_variations_t& tuples = vars[major];
+ unsigned num_rows = tuples.tuple_vars[0].deltas_x.length;
+ for (const tuple_delta_t& tuple: tuples.tuple_vars)
+ {
+ if (tuple.deltas_x.length != num_rows)
+ return false;
+
+ /* skip unused regions */
+ unsigned *col_idx;
+ if (!region_map.has (&(tuple.axis_tuples), &col_idx))
+ continue;
+
+ for (unsigned i = 0; i < num_rows; i++)
+ {
+ int rounded_delta = roundf (tuple.deltas_x[i]);
+ delta_rows[start_row + i][*col_idx] += rounded_delta;
+ if ((!has_long) && (rounded_delta < -65536 || rounded_delta > 65535))
+ has_long = true;
+ }
+ }
+
+ for (unsigned minor = 0; minor < num_rows; minor++)
+ {
+ const hb_vector_t<int>& row = delta_rows[start_row + minor];
+ if (use_no_variation_idx)
+ {
+ bool all_zeros = true;
+ for (int delta : row)
+ {
+ if (delta != 0)
+ {
+ all_zeros = false;
+ break;
+ }
+ }
+ if (all_zeros)
+ continue;
+ }
+
+ if (!front_mapping.set ((major<<16) + minor, &row))
+ return false;
+
+ hb_vector_t<uint8_t> chars = delta_row_encoding_t::get_row_chars (row);
+ if (!chars) return false;
+
+ if (delta_rows_map.has (&row))
+ continue;
+
+ delta_rows_map.set (&row, 1);
+ unsigned *obj_idx;
+ if (chars_idx_map.has (chars, &obj_idx))
+ {
+ delta_row_encoding_t& obj = encoding_objs[*obj_idx];
+ if (!obj.add_row (&row))
+ return false;
+ }
+ else
+ {
+ delta_row_encoding_t obj (std::move (chars), &row);
+ encoding_objs.push (std::move (obj));
+ if (!chars_idx_map.set (chars, encoding_objs.length - 1))
+ return false;
+ }
+ }
+
+ start_row += num_rows;
+ }
+ /* sort encoding_objs */
+ encoding_objs.qsort ();
+
+ /* main algorithm: repeatedly pick the two encodings whose merge yields the
+ * largest gain and combine them */
+ hb_priority_queue_t queue;
+ unsigned num_todos = encoding_objs.length;
+ for (unsigned i = 0; i < num_todos; i++)
+ {
+ for (unsigned j = i + 1; j < num_todos; j++)
+ {
+ int combining_gain = encoding_objs.arrayZ[i].gain_from_merging (encoding_objs.arrayZ[j]);
+ if (combining_gain > 0)
+ {
+ unsigned val = (i << 16) + j;
+ queue.insert (-combining_gain, val);
+ }
+ }
+ }
+
+ hb_set_t removed_todo_idxes;
+ while (queue)
+ {
+ unsigned val = queue.pop_minimum ().second;
+ unsigned j = val & 0xFFFF;
+ unsigned i = (val >> 16) & 0xFFFF;
+
+ if (removed_todo_idxes.has (i) || removed_todo_idxes.has (j))
+ continue;
+
+ delta_row_encoding_t& encoding = encoding_objs.arrayZ[i];
+ delta_row_encoding_t& other_encoding = encoding_objs.arrayZ[j];
+
+ removed_todo_idxes.add (i);
+ removed_todo_idxes.add (j);
+
+ hb_vector_t<uint8_t> combined_chars;
+ if (!combined_chars.alloc (encoding.chars.length))
+ return false;
+
+ for (unsigned idx = 0; idx < encoding.chars.length; idx++)
+ {
+ uint8_t v = hb_max (encoding.chars.arrayZ[idx], other_encoding.chars.arrayZ[idx]);
+ combined_chars.push (v);
+ }
+
+ delta_row_encoding_t combined_encoding_obj (std::move (combined_chars));
+ for (const auto& row : hb_concat (encoding.items, other_encoding.items))
+ combined_encoding_obj.add_row (row);
+
+ for (unsigned idx = 0; idx < encoding_objs.length; idx++)
+ {
+ if (removed_todo_idxes.has (idx)) continue;
+
+ const delta_row_encoding_t& obj = encoding_objs.arrayZ[idx];
+ if (obj.chars == combined_chars)
+ {
+ for (const auto& row : obj.items)
+ combined_encoding_obj.add_row (row);
+
+ removed_todo_idxes.add (idx);
+ continue;
+ }
+
+ int combined_gain = combined_encoding_obj.gain_from_merging (obj);
+ if (combined_gain > 0)
+ {
+ unsigned val = (idx << 16) + encoding_objs.length;
+ queue.insert (-combined_gain, val);
+ }
+ }
+
+ encoding_objs.push (std::move (combined_encoding_obj));
+ }
+
+ int num_final_encodings = (int) encoding_objs.length - (int) removed_todo_idxes.get_population ();
+ if (num_final_encodings <= 0) return false;
+
+ if (!encodings.alloc (num_final_encodings)) return false;
+ for (unsigned i = 0; i < encoding_objs.length; i++)
+ {
+ if (removed_todo_idxes.has (i)) continue;
+ encodings.push (std::move (encoding_objs.arrayZ[i]));
+ }
+
+ /* sort again based on width, make result deterministic */
+ encodings.qsort (delta_row_encoding_t::cmp_width);
+
+ return compile_varidx_map (front_mapping);
+ }
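
To make the merge criterion concrete, here is a standalone re-computation of gain_from_merging () for two made-up encodings: each encoding costs 4 bytes (LOffset) + 6 bytes (VarData header) + 2 bytes per active column, and merging saves one such overhead at the price of widening the rows of the narrower encoding.

#include <algorithm>
#include <cstdio>
#include <vector>

static int overhead (const std::vector<int>& chars)
{
  int active = 0;
  for (int c : chars) if (c) active++;
  return 4 + 6 + 2 * active;
}

static int width (const std::vector<int>& chars)
{
  int w = 0;
  for (int c : chars) w += c;
  return w;
}

int main ()
{
  std::vector<int> a = { 1, 2, 0 };   /* encoding A, storing 2 rows */
  std::vector<int> b = { 1, 2, 2 };   /* encoding B, storing 4 rows */
  int rows_a = 2, rows_b = 4;

  std::vector<int> combined (a.size ());
  for (size_t i = 0; i < a.size (); i++)
    combined[i] = std::max (a[i], b[i]);

  int gain = overhead (a) + overhead (b) - overhead (combined)
           - (width (combined) - width (a)) * rows_a
           - (width (combined) - width (b)) * rows_b;
  printf ("gain = %d\n", gain);   /* 14 + 16 - 16 - 2*2 - 0*4 = 10 > 0: worth merging */
  return 0;
}
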
+
+ private:
+ /* compile the old->new varidx mapping; each encoding's index becomes the
+ * major (outer) part of its rows' new varidxes */
+ bool compile_varidx_map (const hb_hashmap_t<unsigned, const hb_vector_t<int>*>& front_mapping)
+ {
+ /* full encoding_row -> new VarIdxes mapping */
+ hb_hashmap_t<const hb_vector_t<int>*, unsigned> back_mapping;
+
+ for (unsigned major = 0; major < encodings.length; major++)
+ {
+ delta_row_encoding_t& encoding = encodings[major];
+ /* just sanity check, this shouldn't happen */
+ if (encoding.is_empty ())
+ return false;
+
+ unsigned num_rows = encoding.items.length;
+
+ /* sort rows, make result deterministic */
+ encoding.items.qsort (_cmp_row);
+
+ /* compile old to new var_idxes mapping */
+ for (unsigned minor = 0; minor < num_rows; minor++)
+ {
+ unsigned new_varidx = (major << 16) + minor;
+ back_mapping.set (encoding.items.arrayZ[minor], new_varidx);
+ }
+ }
+
+ for (auto _ : front_mapping.iter ())
+ {
+ unsigned old_varidx = _.first;
+ unsigned *new_varidx;
+ if (back_mapping.has (_.second, &new_varidx))
+ varidx_map.set (old_varidx, *new_varidx);
+ else
+ varidx_map.set (old_varidx, HB_OT_LAYOUT_NO_VARIATIONS_INDEX);
+ }
+ return !varidx_map.in_error ();
+ }
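
A variation index packs the VarData subtable index (outer/major) into the high 16 bits and the row index (inner/minor) into the low 16 bits; that is the (major << 16) + minor value written into varidx_map above. A trivial standalone illustration:

#include <cstdio>

int main ()
{
  unsigned major = 2, minor = 5;
  unsigned varidx = (major << 16) + minor;
  printf ("varidx = 0x%08X (outer = %u, inner = %u)\n",
          varidx, varidx >> 16, varidx & 0xFFFF);   /* 0x00020005 (outer = 2, inner = 5) */
  return 0;
}
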
+
+ static int _cmp_row (const void *pa, const void *pb)
+ {
+ /* compare the pointed-to rows (const hb_vector_t<int>*) by content */
+ const hb_vector_t<int>** a = (const hb_vector_t<int>**) pa;
+ const hb_vector_t<int>** b = (const hb_vector_t<int>**) pb;
+
+ return ((*b)->as_array ()).cmp ((*a)->as_array ());
+ }
+};
+
} /* namespace OT */
diff --git a/src/hb-ot-var-mvar-table.hh b/src/hb-ot-var-mvar-table.hh
index d27ebb3..a41e578 100644
--- a/src/hb-ot-var-mvar-table.hh
+++ b/src/hb-ot-var-mvar-table.hh
@@ -27,7 +27,7 @@
#ifndef HB_OT_VAR_MVAR_TABLE_HH
#define HB_OT_VAR_MVAR_TABLE_HH
-#include "hb-ot-layout-common.hh"
+#include "hb-ot-var-common.hh"
namespace OT {
@@ -41,6 +41,19 @@
return_trace (c->check_struct (this));
}
+ bool subset (hb_subset_context_t *c,
+ const hb_map_t& varidx_map) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ hb_codepoint_t *new_idx;
+ return_trace (c->serializer->check_assign (out->varIdx,
+ (varidx_map.has (varIdx, &new_idx)) ? *new_idx : HB_OT_LAYOUT_NO_VARIATIONS_INDEX,
+ HB_SERIALIZE_ERROR_INT_OVERFLOW));
+ }
+
public:
Tag valueTag; /* Four-byte tag identifying a font-wide measure. */
VarIdx varIdx; /* Outer/inner index into VariationStore item. */
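
A minimal standalone sketch of the remapping that the subset () method above performs (std::unordered_map in place of hb_map_t, made-up indices): old indices missing from varidx_map fall back to HB_OT_LAYOUT_NO_VARIATIONS_INDEX (0xFFFFFFFFu).

#include <cstdio>
#include <unordered_map>

int main ()
{
  const unsigned NO_VARIATIONS_INDEX = 0xFFFFFFFFu;
  std::unordered_map<unsigned, unsigned> varidx_map = { { 0x00000003u, 0x00010000u } };

  unsigned old_idx = 0x00000007u;   /* a row that was optimized away */
  auto it = varidx_map.find (old_idx);
  unsigned new_idx = (it != varidx_map.end ()) ? it->second : NO_VARIATIONS_INDEX;
  printf ("0x%08X -> 0x%08X\n", old_idx, new_idx);   /* 0x00000007 -> 0xFFFFFFFF */
  return 0;
}
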
@@ -73,6 +86,52 @@
valueRecordSize));
}
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+#ifdef HB_NO_VAR
+ return_trace (false);
+#endif
+
+ if (c->plan->all_axes_pinned)
+ return_trace (false);
+
+ MVAR *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->version = version;
+ out->reserved = reserved;
+ out->valueRecordSize = valueRecordSize;
+ out->valueRecordCount = valueRecordCount;
+
+ item_variations_t item_vars;
+ const VariationStore& src_var_store = this+varStore;
+ if (!item_vars.create_from_item_varstore (src_var_store, c->plan->axes_old_index_tag_map))
+ return_trace (false);
+
+ if (!item_vars.instantiate (c->plan->axes_location, c->plan->axes_triple_distances))
+ return_trace (false);
+
+ if (!item_vars.optimize ())
+ return_trace (false);
+
+ /* serialize varstore */
+ if (!out->varStore.serialize_serialize (c->serializer, item_vars.has_long_word (),
+ c->plan->axis_tags,
+ item_vars.get_region_list (),
+ item_vars.get_vardata_encodings ()))
+ return_trace (false);
+
+ /* serialize value records array */
+ unsigned value_rec_count = valueRecordCount;
+ const VariationValueRecord *record = reinterpret_cast<const VariationValueRecord*> (valuesZ.arrayZ);
+ for (unsigned i = 0; i < value_rec_count; i++)
+ {
+ if (!record->subset (c, item_vars.get_varidx_map ())) return_trace (false);
+ record++;
+ }
+ return_trace (true);
+ }
+
float get_var (hb_tag_t tag,
const int *coords, unsigned int coord_count) const
{
diff --git a/src/hb-subset-input.cc b/src/hb-subset-input.cc
index aade558..0277d3d 100644
--- a/src/hb-subset-input.cc
+++ b/src/hb-subset-input.cc
@@ -74,7 +74,6 @@
HB_TAG ('p', 'r', 'e', 'p'),
HB_TAG ('V', 'D', 'M', 'X'),
HB_TAG ('D', 'S', 'I', 'G'),
- HB_TAG ('M', 'V', 'A', 'R'),
};
sets.no_subset_tables->add_array (default_no_subset_tables,
ARRAY_LENGTH (default_no_subset_tables));
diff --git a/src/hb-subset-plan-member-list.hh b/src/hb-subset-plan-member-list.hh
index 01d8b0f..46837ad 100644
--- a/src/hb-subset-plan-member-list.hh
+++ b/src/hb-subset-plan-member-list.hh
@@ -113,6 +113,8 @@
//axis_index->axis_tag mapping in fvar axis array
HB_SUBSET_PLAN_MEMBER (hb_map_t, axes_old_index_tag_map)
+//vector of retained axis tags in the order of axes given in the 'fvar' table
+HB_SUBSET_PLAN_MEMBER (hb_vector_t<hb_tag_t>, axis_tags)
//hmtx metrics map: new gid->(advance, lsb)
HB_SUBSET_PLAN_MEMBER (mutable hb_hashmap_t E(<hb_codepoint_t, hb_pair_t E(<unsigned, int>)>), hmtx_map)
diff --git a/src/hb-subset-plan.cc b/src/hb-subset-plan.cc
index c293ba3..7cb306e 100644
--- a/src/hb-subset-plan.cc
+++ b/src/hb-subset-plan.cc
@@ -927,6 +927,7 @@
{
axis_not_pinned = true;
plan->axes_index_map.set (old_axis_idx, new_axis_idx);
+ plan->axis_tags.push (axis_tag);
new_axis_idx++;
}
diff --git a/src/hb-subset.cc b/src/hb-subset.cc
index 100ce87..de3e876 100644
--- a/src/hb-subset.cc
+++ b/src/hb-subset.cc
@@ -55,6 +55,7 @@
#include "hb-ot-var-fvar-table.hh"
#include "hb-ot-var-gvar-table.hh"
#include "hb-ot-var-hvar-table.hh"
+#include "hb-ot-var-mvar-table.hh"
#include "hb-ot-math-table.hh"
#include "hb-ot-stat-table.hh"
#include "hb-repacker.hh"
@@ -523,6 +524,9 @@
case HB_OT_TAG_cvar:
if (plan->user_axes_location.is_empty ()) return _passthrough (plan, tag);
return _subset<const OT::cvar> (plan, buf);
+ case HB_OT_TAG_MVAR:
+ if (plan->user_axes_location.is_empty ()) return _passthrough (plan, tag);
+ return _subset<const OT::MVAR> (plan, buf);
case HB_OT_TAG_STAT:
if (!plan->user_axes_location.is_empty ()) return _subset<const OT::STAT> (plan, buf);
else return _passthrough (plan, tag);
diff --git a/src/meson.build b/src/meson.build
index 19f7cf2..d6c353e 100644
--- a/src/meson.build
+++ b/src/meson.build
@@ -705,6 +705,7 @@
'test-serialize': ['test-serialize.cc', 'hb-static.cc'],
'test-set': ['test-set.cc', 'hb-static.cc'],
'test-tuple-varstore': ['test-tuple-varstore.cc', 'hb-subset-instancer-solver.cc', 'hb-static.cc'],
+ 'test-item-varstore': ['test-item-varstore.cc', 'hb-subset-instancer-solver.cc', 'hb-static.cc'],
'test-use-table': 'test-use-table.cc',
'test-vector': ['test-vector.cc', 'hb-static.cc'],
}
diff --git a/src/test-item-varstore.cc b/src/test-item-varstore.cc
new file mode 100644
index 0000000..d1600d5
--- /dev/null
+++ b/src/test-item-varstore.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright © 2020 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ */
+#include "hb-ot-var-common.hh"
+#include "hb-ot-var-hvar-table.hh"
+// HVAR table data from SourceSerif4Variable-Roman_subset.otf
+const char hvar_data[] = "\x0\x1\x0\x0\x0\x0\x0\x14\x0\x0\x0\xc4\x0\x0\x0\x0\x0\x0\x0\x0\x0\x1\x0\x0\x0\x10\x0\x2\x0\x0\x0\x74\x0\x0\x0\x7a\x0\x2\x0\x8\xc0\x0\xc0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x40\x0\x40\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\xc0\x0\xc0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x0\x40\x0\x40\x0\xc0\x0\xc0\x0\x0\x0\xc0\x0\xc0\x0\x0\x0\xc0\x0\xc0\x0\x0\x0\x0\x0\x40\x0\x40\x0\x0\x0\x40\x0\x40\x0\xc0\x0\xc0\x0\x0\x0\x0\x0\x40\x0\x40\x0\x0\x0\x40\x0\x40\x0\x0\x1\x0\x0\x0\x0\x0\x4\x0\x0\x0\x8\x0\x0\x0\x1\x0\x2\x0\x3\x0\x4\x0\x5\x0\x6\x0\x7\xf9\xf\x2f\xbf\xfb\xfb\x35\xf9\x4\x4\xf3\xb4\xf2\xfb\x2e\xf3\x4\x4\xe\xad\xfa\x1\x1a\x1\x15\x22\x59\xd6\xe3\xf6\x6\xf5\x0\x1\x0\x5\x0\x4\x7\x5\x6";
+
+static void
+test_item_variations ()
+{
+ const OT::HVAR* hvar_table = reinterpret_cast<const OT::HVAR*> (hvar_data);
+
+ hb_tag_t axis_tag = HB_TAG ('w', 'g', 'h', 't');
+ hb_map_t axis_idx_tag_map;
+ axis_idx_tag_map.set (0, axis_tag);
+
+ axis_tag = HB_TAG ('o', 'p', 's', 'z');
+ axis_idx_tag_map.set (1, axis_tag);
+
+ OT::item_variations_t item_vars;
+ const OT::VariationStore& src_var_store = hvar_table+(hvar_table->varStore);
+ bool result = item_vars.create_from_item_varstore (src_var_store, axis_idx_tag_map);
+
+ assert (result);
+
+ /* partial instancing wght=300:800 */
+ hb_hashmap_t<hb_tag_t, Triple> normalized_axes_location;
+ normalized_axes_location.set (axis_tag, Triple (-0.512817f, 0.f, 0.700012f));
+
+ hb_hashmap_t<hb_tag_t, TripleDistances> axes_triple_distances;
+ axes_triple_distances.set (axis_tag, TripleDistances (200.f, 500.f));
+
+ result = item_vars.instantiate (normalized_axes_location, axes_triple_distances);
+ assert (result);
+ result = item_vars.optimize ();
+ assert (result);
+ assert (item_vars.get_region_list().length == 8);
+}
+
+int
+main (int argc, char **argv)
+{
+ test_item_variations ();
+}
diff --git a/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=200-600,wdth=80-90,CTGR=20-60.ttf b/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=200-600,wdth=80-90,CTGR=20-60.ttf
new file mode 100644
index 0000000..679e6c7
--- /dev/null
+++ b/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=200-600,wdth=80-90,CTGR=20-60.ttf
Binary files differ
diff --git a/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=300-600.ttf b/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=300-600.ttf
new file mode 100644
index 0000000..3508c73
--- /dev/null
+++ b/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=300-600.ttf
Binary files differ
diff --git a/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=500-800.ttf b/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=500-800.ttf
new file mode 100644
index 0000000..a14e79e
--- /dev/null
+++ b/test/subset/data/expected/mvar_partial_instance/NotoSans-VF.abc.no-tables-with-item-variations.retain-all-codepoint.wght=500-800.ttf
Binary files differ
diff --git a/test/subset/data/expected/update_def_wght/SourceSerifVariable-Roman.no-tables-with-item-variations.retain-all-codepoint.wght=300-600.ttf b/test/subset/data/expected/update_def_wght/SourceSerifVariable-Roman.no-tables-with-item-variations.retain-all-codepoint.wght=300-600.ttf
new file mode 100644
index 0000000..5f6b2a0
--- /dev/null
+++ b/test/subset/data/expected/update_def_wght/SourceSerifVariable-Roman.no-tables-with-item-variations.retain-all-codepoint.wght=300-600.ttf
Binary files differ
diff --git a/test/subset/data/expected/update_def_wght/SourceSerifVariable-Roman.no-tables-with-item-variations.retain-all-codepoint.wght=500-800.ttf b/test/subset/data/expected/update_def_wght/SourceSerifVariable-Roman.no-tables-with-item-variations.retain-all-codepoint.wght=500-800.ttf
new file mode 100644
index 0000000..edd4513
--- /dev/null
+++ b/test/subset/data/expected/update_def_wght/SourceSerifVariable-Roman.no-tables-with-item-variations.retain-all-codepoint.wght=500-800.ttf
Binary files differ
diff --git a/test/subset/data/profiles/no-tables-with-item-variations.txt b/test/subset/data/profiles/no-tables-with-item-variations.txt
index df2b423..2a5aa18 100644
--- a/test/subset/data/profiles/no-tables-with-item-variations.txt
+++ b/test/subset/data/profiles/no-tables-with-item-variations.txt
@@ -1 +1 @@
---drop-tables+=MVAR,HVAR,VVAR,GDEF,COLR,GPOS
+--drop-tables+=HVAR,VVAR,GDEF,COLR,GPOS
diff --git a/test/subset/data/tests/mvar_partial_instance.tests b/test/subset/data/tests/mvar_partial_instance.tests
new file mode 100644
index 0000000..edf9296
--- /dev/null
+++ b/test/subset/data/tests/mvar_partial_instance.tests
@@ -0,0 +1,13 @@
+FONTS:
+NotoSans-VF.abc.ttf
+
+PROFILES:
+no-tables-with-item-variations.txt
+
+SUBSETS:
+*
+
+INSTANCES:
+wght=300:600
+wght=500:800
+wght=200:600,wdth=80:90,CTGR=20:60
diff --git a/test/subset/data/tests/update_def_wght.tests b/test/subset/data/tests/update_def_wght.tests
new file mode 100644
index 0000000..9ff5fce
--- /dev/null
+++ b/test/subset/data/tests/update_def_wght.tests
@@ -0,0 +1,12 @@
+FONTS:
+SourceSerifVariable-Roman.ttf
+
+PROFILES:
+no-tables-with-item-variations.txt
+
+SUBSETS:
+*
+
+INSTANCES:
+wght=300:600
+wght=500:800
diff --git a/test/subset/meson.build b/test/subset/meson.build
index be4f69b..9c99423 100644
--- a/test/subset/meson.build
+++ b/test/subset/meson.build
@@ -68,7 +68,11 @@
]
if get_option('experimental_api')
- tests += 'glyf_partial_instancing'
+ tests += [
+ 'glyf_partial_instancing',
+ 'mvar_partial_instance',
+ 'update_def_wght',
+ ]
endif
repack_tests = [