scudo: Table driven size classes for Android allocator.

Add an optional table lookup after the existing logarithm computation
for MidSize < Size <= MaxSize during size -> class lookups. The lookup is
O(1) because it indexes a table precomputed (via constexpr) from the
configured list of size classes. Switch to this approach for the Android
size class maps.

Other approaches considered:
- Binary search was found to have an unacceptable (~30%) performance cost.
- An approach using NEON instructions (see older version of D73824) was found
  to be slightly slower than this approach on newer SoCs but significantly
  slower on older ones.

By selecting the values in the size tables to minimize wastage (for example,
by passing the malloc_info output of a target program to the included
compute_size_class_config program), we can increase the density of allocations
at a small (~0.5% on bionic malloc_sql_trace as measured using an identity
table) performance cost.

Reduces RSS on specific Android processes as follows (KB):

                             Before  After
zygote (median of 50 runs)    26836  26792 (-0.2%)
zygote64 (median of 50 runs)  30384  30076 (-1.0%)
dex2oat (median of 3 runs)   375792 372952 (-0.8%)

I also measured the amount of whole-system idle dirty heap on Android by
rebooting the system and then running the following script repeatedly until
the results were stable:

for i in $(seq 1 50); do grep -A5 scudo: /proc/*/smaps | grep Pss: | cut -d: -f2 | awk '{s+=$1} END {print s}' ; sleep 1; done

I did this 3 times both before and after this change and the results were:

Before: 365650, 356795, 372663
After:  344521, 356328, 342589

These results are noisy, so it is hard to draw a definite conclusion, but
there does appear to be a significant effect.

On other platforms, increase the sizes of all size classes by a fixed offset
equal to the size of the allocation header. This has also been found to improve
density, since it is likely for allocation sizes to be a power of 2, which
would otherwise waste space by pushing the allocation into the next size class.

Differential Revision: https://reviews.llvm.org/D73824

GitOrigin-RevId: 041547eb4eb0fcb2155af7537aaed7f601ea6343
diff --git a/size_class_map.h b/size_class_map.h
index ff587c9..46f53ae 100644
--- a/size_class_map.h
+++ b/size_class_map.h
@@ -9,11 +9,33 @@
 #ifndef SCUDO_SIZE_CLASS_MAP_H_
 #define SCUDO_SIZE_CLASS_MAP_H_
 
+#include "chunk.h"
 #include "common.h"
 #include "string_utils.h"
 
 namespace scudo {
 
+inline uptr scaledLog2(uptr Size, uptr ZeroLog, uptr LogBits) {
+  const uptr L = getMostSignificantSetBitIndex(Size);
+  const uptr LBits = (Size >> (L - LogBits)) - (1 << LogBits);
+  const uptr HBits = (L - ZeroLog) << LogBits;
+  return LBits + HBits;
+}
+
+template <typename Config> struct SizeClassMapBase {
+  static u32 getMaxCachedHint(uptr Size) {
+    // Note: MaxSize is declared only in the derived maps, so it cannot be
+    // DCHECKed here; callers guarantee Size <= MaxSize.
+    DCHECK_NE(Size, 0);
+    u32 N;
+    // Force a 32-bit division if the template parameters allow for it.
+    if (Config::MaxBytesCachedLog > 31 || Config::MaxSizeLog > 31)
+      N = static_cast<u32>((1UL << Config::MaxBytesCachedLog) / Size);
+    else
+      N = (1U << Config::MaxBytesCachedLog) / static_cast<u32>(Size);
+    return Max(1U, Min(Config::MaxNumCachedHint, N));
+  }
+};
+
 // SizeClassMap maps allocation sizes into size classes and back, in an
 // efficient table-free manner.
 //
@@ -33,22 +55,24 @@
 // of chunks that can be cached per-thread:
 // - MaxNumCachedHint is a hint for the max number of chunks cached per class.
 // - 2^MaxBytesCachedLog is the max number of bytes cached per class.
+template <typename Config>
+class FixedSizeClassMap : public SizeClassMapBase<Config> {
+  typedef SizeClassMapBase<Config> Base;
 
-template <u8 NumBits, u8 MinSizeLog, u8 MidSizeLog, u8 MaxSizeLog,
-          u32 MaxNumCachedHintT, u8 MaxBytesCachedLog>
-class SizeClassMap {
-  static const uptr MinSize = 1UL << MinSizeLog;
-  static const uptr MidSize = 1UL << MidSizeLog;
+  static const uptr MinSize = 1UL << Config::MinSizeLog;
+  static const uptr MidSize = 1UL << Config::MidSizeLog;
   static const uptr MidClass = MidSize / MinSize;
-  static const u8 S = NumBits - 1;
+  static const u8 S = Config::NumBits - 1;
   static const uptr M = (1UL << S) - 1;
 
-public:
-  static const u32 MaxNumCachedHint = MaxNumCachedHintT;
+  static const uptr SizeDelta = Chunk::getHeaderSize();
 
-  static const uptr MaxSize = 1UL << MaxSizeLog;
+public:
+  static const u32 MaxNumCachedHint = Config::MaxNumCachedHint;
+
+  static const uptr MaxSize = (1UL << Config::MaxSizeLog) + SizeDelta;
   static const uptr NumClasses =
-      MidClass + ((MaxSizeLog - MidSizeLog) << S) + 1;
+      MidClass + ((Config::MaxSizeLog - Config::MidSizeLog) << S) + 1;
   static_assert(NumClasses <= 256, "");
   static const uptr LargestClassId = NumClasses - 1;
   static const uptr BatchClassId = 0;
@@ -56,97 +80,206 @@
   static uptr getSizeByClassId(uptr ClassId) {
     DCHECK_NE(ClassId, BatchClassId);
     if (ClassId <= MidClass)
-      return ClassId << MinSizeLog;
+      return (ClassId << Config::MinSizeLog) + SizeDelta;
     ClassId -= MidClass;
     const uptr T = MidSize << (ClassId >> S);
-    return T + (T >> S) * (ClassId & M);
+    return T + (T >> S) * (ClassId & M) + SizeDelta;
   }
 
   static uptr getClassIdBySize(uptr Size) {
+    if (Size <= SizeDelta + (1 << Config::MinSizeLog))
+      return 1;
+    Size -= SizeDelta;
     DCHECK_LE(Size, MaxSize);
     if (Size <= MidSize)
-      return (Size + MinSize - 1) >> MinSizeLog;
-    Size -= 1;
-    const uptr L = getMostSignificantSetBitIndex(Size);
-    const uptr LBits = (Size >> (L - S)) - (1 << S);
-    const uptr HBits = (L - MidSizeLog) << S;
-    return MidClass + 1 + HBits + LBits;
-  }
-
-  static u32 getMaxCachedHint(uptr Size) {
-    DCHECK_LE(Size, MaxSize);
-    DCHECK_NE(Size, 0);
-    u32 N;
-    // Force a 32-bit division if the template parameters allow for it.
-    if (MaxBytesCachedLog > 31 || MaxSizeLog > 31)
-      N = static_cast<u32>((1UL << MaxBytesCachedLog) / Size);
-    else
-      N = (1U << MaxBytesCachedLog) / static_cast<u32>(Size);
-    return Max(1U, Min(MaxNumCachedHint, N));
-  }
-
-  static void print() {
-    ScopedString Buffer(1024);
-    uptr PrevS = 0;
-    uptr TotalCached = 0;
-    for (uptr I = 0; I < NumClasses; I++) {
-      if (I == BatchClassId)
-        continue;
-      const uptr S = getSizeByClassId(I);
-      if (S >= MidSize / 2 && (S & (S - 1)) == 0)
-        Buffer.append("\n");
-      const uptr D = S - PrevS;
-      const uptr P = PrevS ? (D * 100 / PrevS) : 0;
-      const uptr L = S ? getMostSignificantSetBitIndex(S) : 0;
-      const uptr Cached = getMaxCachedHint(S) * S;
-      Buffer.append(
-          "C%02zu => S: %zu diff: +%zu %02zu%% L %zu Cached: %zu %zu; id %zu\n",
-          I, getSizeByClassId(I), D, P, L, getMaxCachedHint(S), Cached,
-          getClassIdBySize(S));
-      TotalCached += Cached;
-      PrevS = S;
-    }
-    Buffer.append("Total Cached: %zu\n", TotalCached);
-    Buffer.output();
-  }
-
-  static void validate() {
-    for (uptr C = 0; C < NumClasses; C++) {
-      if (C == BatchClassId)
-        continue;
-      const uptr S = getSizeByClassId(C);
-      CHECK_NE(S, 0U);
-      CHECK_EQ(getClassIdBySize(S), C);
-      if (C < LargestClassId)
-        CHECK_EQ(getClassIdBySize(S + 1), C + 1);
-      CHECK_EQ(getClassIdBySize(S - 1), C);
-      if (C - 1 != BatchClassId)
-        CHECK_GT(getSizeByClassId(C), getSizeByClassId(C - 1));
-    }
-    // Do not perform the loop if the maximum size is too large.
-    if (MaxSizeLog > 19)
-      return;
-    for (uptr S = 1; S <= MaxSize; S++) {
-      const uptr C = getClassIdBySize(S);
-      CHECK_LT(C, NumClasses);
-      CHECK_GE(getSizeByClassId(C), S);
-      if (C - 1 != BatchClassId)
-        CHECK_LT(getSizeByClassId(C - 1), S);
-    }
+      return (Size + MinSize - 1) >> Config::MinSizeLog;
+    return MidClass + 1 + scaledLog2(Size - 1, Config::MidSizeLog, S);
   }
 };
 
-typedef SizeClassMap<3, 5, 8, 17, 8, 10> DefaultSizeClassMap;
+template <typename Config>
+class TableSizeClassMap : public SizeClassMapBase<Config> {
+  static const u8 S = Config::NumBits - 1;
+  static const uptr M = (1UL << S) - 1;
+  static const uptr ClassesSize =
+      sizeof(Config::Classes) / sizeof(Config::Classes[0]);
 
-// TODO(kostyak): further tune class maps for Android & Fuchsia.
+  struct SizeTable {
+    constexpr SizeTable() {
+      uptr Pos = 1 << Config::MidSizeLog;
+      uptr Inc = 1 << (Config::MidSizeLog - S);
+      for (uptr i = 0; i != getTableSize(); ++i) {
+        Pos += Inc;
+        if ((Pos & (Pos - 1)) == 0)
+          Inc *= 2;
+        Tab[i] = computeClassId(Pos + Config::SizeDelta);
+      }
+    }
+
+    constexpr static u8 computeClassId(uptr Size) {
+      for (uptr i = 0; i != ClassesSize; ++i) {
+        if (Size <= Config::Classes[i])
+          return i + 1;
+      }
+      return static_cast<u8>(-1);
+    }
+
+    constexpr static uptr getTableSize() {
+      return (Config::MaxSizeLog - Config::MidSizeLog) << S;
+    }
+
+    u8 Tab[getTableSize()] = {};
+  };
+
+  static constexpr SizeTable Table = {};
+
+public:
+  static const u32 MaxNumCachedHint = Config::MaxNumCachedHint;
+
+  static const uptr NumClasses = ClassesSize + 1;
+  static_assert(NumClasses < 256, "");
+  static const uptr LargestClassId = NumClasses - 1;
+  static const uptr BatchClassId = 0;
+  static const uptr MaxSize = Config::Classes[LargestClassId - 1];
+
+  static uptr getSizeByClassId(uptr ClassId) {
+    return Config::Classes[ClassId - 1];
+  }
+
+  static uptr getClassIdBySize(uptr Size) {
+    if (Size <= Config::Classes[0])
+      return 1;
+    Size -= Config::SizeDelta;
+    DCHECK_LE(Size, MaxSize);
+    if (Size <= (1 << Config::MidSizeLog))
+      return ((Size - 1) >> Config::MinSizeLog) + 1;
+    return Table.Tab[scaledLog2(Size - 1, Config::MidSizeLog, S)];
+  }
+
+  static void print() {}
+  static void validate() {}
+};
+
+struct AndroidSizeClassConfig {
 #if SCUDO_WORDSIZE == 64U
-typedef SizeClassMap<4, 4, 8, 14, 4, 10> SvelteSizeClassMap;
-typedef SizeClassMap<2, 5, 9, 16, 14, 14> AndroidSizeClassMap;
-#else
-typedef SizeClassMap<4, 3, 7, 14, 5, 10> SvelteSizeClassMap;
-typedef SizeClassMap<3, 4, 9, 16, 14, 14> AndroidSizeClassMap;
-#endif
+  // Measured using a system_server profile.
+  static const uptr NumBits = 7;
+  static const uptr MinSizeLog = 4;
+  static const uptr MidSizeLog = 6;
+  static const uptr MaxSizeLog = 16;
+  static const u32 MaxNumCachedHint = 14;
+  static const uptr MaxBytesCachedLog = 14;
 
+  static constexpr u32 Classes[] = {
+      0x00020, 0x00030, 0x00040, 0x00050, 0x00060, 0x00070, 0x00090, 0x000a0,
+      0x000b0, 0x000e0, 0x00110, 0x00130, 0x001a0, 0x00240, 0x00320, 0x00430,
+      0x00640, 0x00830, 0x00a10, 0x00c30, 0x01010, 0x01150, 0x01ad0, 0x02190,
+      0x03610, 0x04010, 0x04510, 0x04d10, 0x05a10, 0x07310, 0x09610, 0x10010,
+  };
+  static const uptr SizeDelta = 16;
+#else
+  // Measured using a dex2oat profile.
+  static const uptr NumBits = 8;
+  static const uptr MinSizeLog = 4;
+  static const uptr MidSizeLog = 8;
+  static const uptr MaxSizeLog = 16;
+  static const u32 MaxNumCachedHint = 14;
+  static const uptr MaxBytesCachedLog = 14;
+
+  static constexpr u32 Classes[] = {
+      0x00020, 0x00030, 0x00040, 0x00050, 0x00060, 0x00070, 0x00080, 0x00090,
+      0x000a0, 0x000b0, 0x000c0, 0x000d0, 0x000e0, 0x000f0, 0x00100, 0x00110,
+      0x00120, 0x00140, 0x00150, 0x00170, 0x00190, 0x001c0, 0x001f0, 0x00220,
+      0x00240, 0x00260, 0x002a0, 0x002e0, 0x00310, 0x00340, 0x00380, 0x003b0,
+      0x003e0, 0x00430, 0x00490, 0x00500, 0x00570, 0x005f0, 0x00680, 0x00720,
+      0x007d0, 0x00890, 0x00970, 0x00a50, 0x00b80, 0x00cb0, 0x00e30, 0x00fb0,
+      0x011b0, 0x01310, 0x01470, 0x01790, 0x01b50, 0x01fd0, 0x02310, 0x02690,
+      0x02b10, 0x02fd0, 0x03610, 0x03e10, 0x04890, 0x05710, 0x06a90, 0x10010,
+  };
+  static const uptr SizeDelta = 16;
+#endif
+};
+
+typedef TableSizeClassMap<AndroidSizeClassConfig> AndroidSizeClassMap;
+
+struct DefaultSizeClassConfig {
+  static const uptr NumBits = 3;
+  static const uptr MinSizeLog = 5;
+  static const uptr MidSizeLog = 8;
+  static const uptr MaxSizeLog = 17;
+  static const u32 MaxNumCachedHint = 8;
+  static const uptr MaxBytesCachedLog = 10;
+};
+
+typedef FixedSizeClassMap<DefaultSizeClassConfig> DefaultSizeClassMap;
+
+struct SvelteSizeClassConfig {
+#if SCUDO_WORDSIZE == 64U
+  static const uptr NumBits = 4;
+  static const uptr MinSizeLog = 4;
+  static const uptr MidSizeLog = 8;
+  static const uptr MaxSizeLog = 14;
+  static const u32 MaxNumCachedHint = 4;
+  static const uptr MaxBytesCachedLog = 10;
+#else
+  static const uptr NumBits = 4;
+  static const uptr MinSizeLog = 3;
+  static const uptr MidSizeLog = 7;
+  static const uptr MaxSizeLog = 14;
+  static const u32 MaxNumCachedHint = 5;
+  static const uptr MaxBytesCachedLog = 10;
+#endif
+};
+
+typedef FixedSizeClassMap<SvelteSizeClassConfig> SvelteSizeClassMap;
+
+template <typename SCMap> inline void printMap() {
+  ScopedString Buffer(1024);
+  uptr PrevS = 0;
+  uptr TotalCached = 0;
+  for (uptr I = 0; I < SCMap::NumClasses; I++) {
+    if (I == SCMap::BatchClassId)
+      continue;
+    const uptr S = SCMap::getSizeByClassId(I);
+    const uptr D = S - PrevS;
+    const uptr P = PrevS ? (D * 100 / PrevS) : 0;
+    const uptr L = S ? getMostSignificantSetBitIndex(S) : 0;
+    const uptr Cached = SCMap::getMaxCachedHint(S) * S;
+    Buffer.append(
+        "C%02zu => S: %zu diff: +%zu %02zu%% L %zu Cached: %zu %zu; id %zu\n",
+        I, S, D, P, L, SCMap::getMaxCachedHint(S), Cached,
+        SCMap::getClassIdBySize(S));
+    TotalCached += Cached;
+    PrevS = S;
+  }
+  Buffer.append("Total Cached: %zu\n", TotalCached);
+  Buffer.output();
+}
+
+template <typename SCMap> static void validateMap() {
+  for (uptr C = 0; C < SCMap::NumClasses; C++) {
+    if (C == SCMap::BatchClassId)
+      continue;
+    const uptr S = SCMap::getSizeByClassId(C);
+    CHECK_NE(S, 0U);
+    CHECK_EQ(SCMap::getClassIdBySize(S), C);
+    if (C < SCMap::LargestClassId)
+      CHECK_EQ(SCMap::getClassIdBySize(S + 1), C + 1);
+    CHECK_EQ(SCMap::getClassIdBySize(S - 1), C);
+    if (C - 1 != SCMap::BatchClassId)
+      CHECK_GT(SCMap::getSizeByClassId(C), SCMap::getSizeByClassId(C - 1));
+  }
+  // Do not perform the loop if the maximum size is too large.
+  if (SCMap::MaxSize > (1 << 19))
+    return;
+  for (uptr S = 1; S <= SCMap::MaxSize; S++) {
+    const uptr C = SCMap::getClassIdBySize(S);
+    CHECK_LT(C, SCMap::NumClasses);
+    CHECK_GE(SCMap::getSizeByClassId(C), S);
+    if (C - 1 != SCMap::BatchClassId)
+      CHECK_LT(SCMap::getSizeByClassId(C - 1), S);
+  }
+}
 } // namespace scudo
 
 #endif // SCUDO_SIZE_CLASS_MAP_H_