scudo: Move the management of the UseMemoryTagging bit out of the Primary. NFCI.

The primary and secondary allocators will need to share this bit,
so move its management into the combined allocator and make
useMemoryTagging() a free function.

Differential Revision: https://reviews.llvm.org/D93730

GitOrigin-RevId: faac1c02c802048efa17f8f6cda8f39b5584f0c6
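
For illustration, a minimal standalone sketch of the pattern this patch
introduces. The simplified Options, OptionBit, and MockConfig types below
are hypothetical stand-ins for the real definitions in options.h and
memtag.h, not the actual scudo code:

    // Hypothetical stand-ins for the real scudo types, for illustration only.
    enum class OptionBit : unsigned { UseMemoryTagging = 0 };

    struct Options {
      unsigned Val;
      bool get(OptionBit O) const {
        return (Val >> static_cast<unsigned>(O)) & 1;
      }
    };

    // Compile-time support check keyed off the allocator config.
    template <typename Config> constexpr bool allocatorSupportsMemoryTagging() {
      return Config::MaySupportMemoryTagging;
    }

    // The free function combines the compile-time config check with the
    // runtime option bit, so the primary and secondary allocators can both
    // consult the same shared Options word instead of going through a
    // Primary method.
    template <typename Config> bool useMemoryTagging(Options O) {
      return allocatorSupportsMemoryTagging<Config>() &&
             O.get(OptionBit::UseMemoryTagging);
    }

    struct MockConfig {
      static const bool MaySupportMemoryTagging = true;
    };

    int main() {
      Options O{1u << 0}; // UseMemoryTagging bit set at runtime.
      return useMemoryTagging<MockConfig>(O) ? 0 : 1;
    }

Call sites in the patch below follow the same shape: they instantiate the
template with the allocator config (Params or Config) and pass a loaded
copy of the shared atomic options word.
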
diff --git a/combined.h b/combined.h
index fae71ba..911c49d 100644
--- a/combined.h
+++ b/combined.h
@@ -100,7 +100,7 @@
 
       // Reset tag to 0 as this chunk may have been previously used for a tagged
       // user allocation.
-      if (UNLIKELY(Allocator.useMemoryTagging()))
+      if (UNLIKELY(useMemoryTagging<Params>(Allocator.Primary.Options.load())))
         storeTags(reinterpret_cast<uptr>(Ptr),
                   reinterpret_cast<uptr>(Ptr) + sizeof(QuarantineBatch));
 
@@ -161,6 +161,9 @@
       Primary.Options.set(OptionBit::DeallocTypeMismatch);
     if (getFlags()->delete_size_mismatch)
       Primary.Options.set(OptionBit::DeleteSizeMismatch);
+    if (allocatorSupportsMemoryTagging<Params>() &&
+        systemSupportsMemoryTagging())
+      Primary.Options.set(OptionBit::UseMemoryTagging);
     Primary.Options.set(OptionBit::UseOddEvenTags);
 
     QuarantineMaxChunkSize =
@@ -240,7 +243,7 @@
   }
 
   ALWAYS_INLINE void *untagPointerMaybe(void *Ptr) {
-    if (Primary.SupportsMemoryTagging)
+    if (allocatorSupportsMemoryTagging<Params>())
       return reinterpret_cast<void *>(
           untagPointer(reinterpret_cast<uptr>(Ptr)));
     return Ptr;
@@ -367,7 +370,7 @@
       //
       // When memory tagging is enabled, zeroing the contents is done as part of
       // setting the tag.
-      if (UNLIKELY(useMemoryTagging(Options))) {
+      if (UNLIKELY(useMemoryTagging<Params>(Options))) {
         uptr PrevUserPtr;
         Chunk::UnpackedHeader Header;
         const uptr BlockSize = PrimaryT::getSizeByClassId(ClassId);
@@ -594,7 +597,7 @@
                      : BlockEnd - (reinterpret_cast<uptr>(OldPtr) + NewSize)) &
             Chunk::SizeOrUnusedBytesMask;
         Chunk::compareExchangeHeader(Cookie, OldPtr, &NewHeader, &OldHeader);
-        if (UNLIKELY(ClassId && useMemoryTagging(Options))) {
+        if (UNLIKELY(ClassId && useMemoryTagging<Params>(Options))) {
           resizeTaggedChunk(reinterpret_cast<uptr>(OldTaggedPtr) + OldSize,
                             reinterpret_cast<uptr>(OldTaggedPtr) + NewSize,
                             BlockEnd);
@@ -692,7 +695,7 @@
       if (getChunkFromBlock(Block, &Chunk, &Header) &&
           Header.State == Chunk::State::Allocated) {
         uptr TaggedChunk = Chunk;
-        if (useMemoryTagging(Primary.Options.load()))
+        if (useMemoryTagging<Params>(Primary.Options.load()))
           TaggedChunk = loadTag(Chunk);
         Callback(TaggedChunk, getSize(reinterpret_cast<void *>(Chunk), &Header),
                  Arg);
@@ -783,15 +786,14 @@
            Header.State == Chunk::State::Allocated;
   }
 
-  bool useMemoryTagging() const {
-    return useMemoryTagging(Primary.Options.load());
+  bool useMemoryTaggingTestOnly() const {
+    return useMemoryTagging<Params>(Primary.Options.load());
   }
-  static bool useMemoryTagging(Options Options) {
-    return PrimaryT::useMemoryTagging(Options);
+  void disableMemoryTagging() {
+    if (allocatorSupportsMemoryTagging<Params>())
+      Primary.Options.clear(OptionBit::UseMemoryTagging);
   }
 
-  void disableMemoryTagging() { Primary.disableMemoryTagging(); }
-
   void setTrackAllocationStacks(bool Track) {
     initThreadMaybe();
     if (Track)
@@ -823,7 +825,7 @@
                            const char *MemoryTags, uintptr_t MemoryAddr,
                            size_t MemorySize) {
     *ErrorInfo = {};
-    if (!PrimaryT::SupportsMemoryTagging ||
+    if (!allocatorSupportsMemoryTagging<Params>() ||
         MemoryAddr + MemorySize < MemoryAddr)
       return;
 
@@ -942,7 +944,7 @@
 
   static_assert(MinAlignment >= sizeof(Chunk::PackedHeader),
                 "Minimal alignment must at least cover a chunk header.");
-  static_assert(!PrimaryT::SupportsMemoryTagging ||
+  static_assert(!allocatorSupportsMemoryTagging<Params>() ||
                     MinAlignment >= archMemoryTagGranuleSize(),
                 "");
 
@@ -1037,7 +1039,7 @@
   void quarantineOrDeallocateChunk(Options Options, void *Ptr,
                                    Chunk::UnpackedHeader *Header, uptr Size) {
     Chunk::UnpackedHeader NewHeader = *Header;
-    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging(Options))) {
+    if (UNLIKELY(NewHeader.ClassId && useMemoryTagging<Params>(Options))) {
       u8 PrevTag = extractTag(loadTag(reinterpret_cast<uptr>(Ptr)));
       if (!TSDRegistry.getDisableMemInit()) {
         uptr TaggedBegin, TaggedEnd;
diff --git a/options.h b/options.h
index 2cffc4d..91301bf 100644
--- a/options.h
+++ b/options.h
@@ -11,6 +11,7 @@
 
 #include "atomic_helpers.h"
 #include "common.h"
+#include "memtag.h"
 
 namespace scudo {
 
@@ -36,6 +37,11 @@
   }
 };
 
+template <typename Config> bool useMemoryTagging(Options Options) {
+  return allocatorSupportsMemoryTagging<Config>() &&
+         Options.get(OptionBit::UseMemoryTagging);
+}
+
 struct AtomicOptions {
   atomic_u32 Val;
 
diff --git a/primary32.h b/primary32.h
index c744670..a88a2a6 100644
--- a/primary32.h
+++ b/primary32.h
@@ -50,7 +50,6 @@
   typedef SizeClassAllocator32<Config> ThisT;
   typedef SizeClassAllocatorLocalCache<ThisT> CacheT;
   typedef typename CacheT::TransferBatch TransferBatch;
-  static const bool SupportsMemoryTagging = false;
 
   static uptr getSizeByClassId(uptr ClassId) {
     return (ClassId == SizeClassMap::BatchClassId)
@@ -216,9 +215,6 @@
     return TotalReleasedBytes;
   }
 
-  static bool useMemoryTagging(UNUSED Options Options) { return false; }
-  void disableMemoryTagging() {}
-
   const char *getRegionInfoArrayAddress() const { return nullptr; }
   static uptr getRegionInfoArraySize() { return 0; }
 
diff --git a/primary64.h b/primary64.h
index df1310a..2724a25 100644
--- a/primary64.h
+++ b/primary64.h
@@ -46,8 +46,6 @@
   typedef SizeClassAllocator64<Config> ThisT;
   typedef SizeClassAllocatorLocalCache<ThisT> CacheT;
   typedef typename CacheT::TransferBatch TransferBatch;
-  static const bool SupportsMemoryTagging =
-      allocatorSupportsMemoryTagging<Config>();
 
   static uptr getSizeByClassId(uptr ClassId) {
     return (ClassId == SizeClassMap::BatchClassId)
@@ -76,9 +74,6 @@
       Region->ReleaseInfo.LastReleaseAtNs = Time;
     }
     setOption(Option::ReleaseInterval, static_cast<sptr>(ReleaseToOsInterval));
-
-    if (SupportsMemoryTagging && systemSupportsMemoryTagging())
-      Options.set(OptionBit::UseMemoryTagging);
   }
   void init(s32 ReleaseToOsInterval) {
     memset(this, 0, sizeof(*this));
@@ -193,11 +188,6 @@
     return TotalReleasedBytes;
   }
 
-  static bool useMemoryTagging(Options Options) {
-    return SupportsMemoryTagging && Options.get(OptionBit::UseMemoryTagging);
-  }
-  void disableMemoryTagging() { Options.clear(OptionBit::UseMemoryTagging); }
-
   const char *getRegionInfoArrayAddress() const {
     return reinterpret_cast<const char *>(RegionInfoArray);
   }
@@ -335,7 +325,7 @@
       if (!map(reinterpret_cast<void *>(RegionBeg + MappedUser), UserMapSize,
                "scudo:primary",
                MAP_ALLOWNOMEM | MAP_RESIZABLE |
-                   (useMemoryTagging(Options.load()) ? MAP_MEMTAG : 0),
+                   (useMemoryTagging<Config>(Options.load()) ? MAP_MEMTAG : 0),
                &Region->Data))
         return nullptr;
       Region->MappedUser += UserMapSize;
diff --git a/tests/combined_test.cpp b/tests/combined_test.cpp
index 5387493..7bb6725 100644
--- a/tests/combined_test.cpp
+++ b/tests/combined_test.cpp
@@ -47,7 +47,7 @@
 template <class AllocatorT>
 bool isTaggedAllocation(AllocatorT *Allocator, scudo::uptr Size,
                         scudo::uptr Alignment) {
-  return Allocator->useMemoryTagging() &&
+  return Allocator->useMemoryTaggingTestOnly() &&
          scudo::systemDetectsMemoryTagFaultsTestOnly() &&
          isPrimaryAllocation<AllocatorT>(Size, Alignment);
 }
@@ -162,7 +162,7 @@
       for (scudo::uptr I = 0; I < Size; I++) {
         unsigned char V = (reinterpret_cast<unsigned char *>(P))[I];
         if (isPrimaryAllocation<AllocatorT>(Size, 1U << MinAlignLog) &&
-            !Allocator->useMemoryTagging())
+            !Allocator->useMemoryTaggingTestOnly())
           ASSERT_EQ(V, scudo::PatternFillByte);
         else
           ASSERT_TRUE(V == scudo::PatternFillByte || V == 0);
@@ -248,7 +248,7 @@
 
   Allocator->releaseToOS();
 
-  if (Allocator->useMemoryTagging() &&
+  if (Allocator->useMemoryTaggingTestOnly() &&
       scudo::systemDetectsMemoryTagFaultsTestOnly()) {
     // Check that use-after-free is detected.
     for (scudo::uptr SizeLog = 0U; SizeLog <= 20U; SizeLog++) {
@@ -493,7 +493,7 @@
   using SizeClassMap = AllocatorT::PrimaryT::SizeClassMap;
   auto Allocator = std::unique_ptr<AllocatorT>(new AllocatorT());
 
-  if (!Allocator->useMemoryTagging())
+  if (!Allocator->useMemoryTaggingTestOnly())
     return;
 
   auto CheckOddEven = [](scudo::uptr P1, scudo::uptr P2) {