Bug 1402284 - Separate arenas created from moz_arena_* functions from others. r?njn draft
authorMike Hommey <mh+mozilla@glandium.org>
Tue, 31 Oct 2017 07:13:39 +0900
changeset 689889 d975888c2626073c86a83b98d78188d5b6fa0086
parent 689888 8959c98b53fc4a3a0cae69ab1c8d0a8307957b2f
child 738413 2faf6347d77983f2194595e514919f8aa26a2af3
push id: 87131
push user: bmo:mh+mozilla@glandium.org
push date: Wed, 01 Nov 2017 03:13:46 +0000
reviewers: njn
bugs: 1402284
milestone: 58.0a1
Bug 1402284 - Separate arenas created from moz_arena_* functions from others. r?njn We introduce the notion of private arenas, separate from other arenas (main and thread-local). They are kept in a separate arena tree, and arena lookups from moz_arena_* functions only access the tree of private arenas. Iteration still goes through all arenas, private and non-private.
memory/build/mozjemalloc.cpp
memory/build/rb.h
--- a/memory/build/mozjemalloc.cpp
+++ b/memory/build/mozjemalloc.cpp
@@ -1049,56 +1049,94 @@ struct ArenaTreeTrait
   {
     MOZ_ASSERT(aNode);
     MOZ_ASSERT(aOther);
     return (aNode->mId > aOther->mId) - (aNode->mId < aOther->mId);
   }
 };
 
 // Bookkeeping for all the arenas used by the allocator.
+// Arenas are separated into two categories:
+// - "private" arenas, used through the moz_arena_* API
+// - all the other arenas: the default arena, and thread-local arenas,
+//   used by the standard API.
 class ArenaCollection
 {
 public:
   bool Init()
   {
     mArenas.Init();
-    mDefaultArena = mLock.Init() ? CreateArena() : nullptr;
+    mPrivateArenas.Init();
+    mDefaultArena = mLock.Init() ? CreateArena(/* IsPrivate = */ false) : nullptr;
     if (mDefaultArena) {
       // arena_t constructor sets this to a lower value for thread local
       // arenas; Reset to the default value for the main arena.
       mDefaultArena->mMaxDirty = opt_dirty_max;
     }
     return bool(mDefaultArena);
   }
 
-  inline arena_t* GetById(arena_id_t aArenaId);
-
-  arena_t* CreateArena();
+  inline arena_t* GetById(arena_id_t aArenaId, bool aIsPrivate);
+
+  arena_t* CreateArena(bool aIsPrivate);
 
   void DisposeArena(arena_t* aArena)
   {
     MutexAutoLock lock(mLock);
-    mArenas.Remove(aArena);
+    (mPrivateArenas.Search(aArena) ? mPrivateArenas : mArenas).Remove(aArena);
     // The arena is leaked, and remaining allocations in it still are alive
     // until they are freed. After that, the arena will be empty but still
     // taking have at least a chunk taking address space. TODO: bug 1364359.
   }
 
-  using Iterator = RedBlackTree<arena_t, ArenaTreeTrait>::Iterator;
-
-  Iterator iter() { return mArenas.iter(); }
+  using Tree = RedBlackTree<arena_t, ArenaTreeTrait>;
+
+  struct Iterator : Tree::Iterator
+  {
+    explicit Iterator(Tree* aTree, Tree* aSecondTree)
+      : Tree::Iterator(aTree)
+      , mNextTree(aSecondTree)
+    {
+    }
+
+    Item<Iterator> begin()
+    {
+      return Item<Iterator>(this, *Tree::Iterator::begin());
+    }
+
+    Item<Iterator> end()
+    {
+      return Item<Iterator>(this, nullptr);
+    }
+
+    Tree::TreeNode* Next()
+    {
+      Tree::TreeNode* result = Tree::Iterator::Next();
+      if (!result && mNextTree) {
+	new (this) Iterator(mNextTree, nullptr);
+	result = reinterpret_cast<Tree::TreeNode*>(*Tree::Iterator::begin());
+      }
+      return result;
+    }
+
+  private:
+    Tree* mNextTree;
+  };
+
+  Iterator iter() { return Iterator(&mArenas, &mPrivateArenas); }
 
   inline arena_t* GetDefault() { return mDefaultArena; }
 
   Mutex mLock;
 
 private:
   arena_t* mDefaultArena;
   arena_id_t mLastArenaId;
-  RedBlackTree<arena_t, ArenaTreeTrait> mArenas;
+  Tree mArenas;
+  Tree mPrivateArenas;
 };
 
 static ArenaCollection gArenas;
 
 // ******
 // Chunks.
 static AddressRadixTree<(sizeof(void*) << 3) - CHUNK_2POW_DEFAULT> gChunkRTree;
 
@@ -2172,17 +2210,17 @@ thread_local_arena(bool enabled)
 {
   arena_t* arena;
 
   if (enabled) {
     // The arena will essentially be leaked if this function is
     // called with `false`, but it doesn't matter at the moment.
     // because in practice nothing actually calls this function
     // with `false`, except maybe at shutdown.
-    arena = gArenas.CreateArena();
+    arena = gArenas.CreateArena(/* IsPrivate = */ false);
   } else {
     arena = gArenas.GetDefault();
   }
   thread_arena.set(arena);
   return arena;
 }
 
 template<>
@@ -3838,17 +3876,17 @@ arena_t::arena_t()
   }
 
 #if defined(MOZ_DIAGNOSTIC_ASSERT_ENABLED)
   mMagic = ARENA_MAGIC;
 #endif
 }
 
 arena_t*
-ArenaCollection::CreateArena()
+ArenaCollection::CreateArena(bool aIsPrivate)
 {
   fallible_t fallible;
   arena_t* ret = new (fallible) arena_t();
   if (!ret) {
     // Only reached if there is an OOM error.
 
     // OOM here is quite inconvenient to propagate, since dealing with it
     // would require a check for failure in the fast path.  Instead, punt
@@ -3858,17 +3896,17 @@ ArenaCollection::CreateArena()
 
     return mDefaultArena;
   }
 
   MutexAutoLock lock(mLock);
 
   // TODO: Use random Ids.
   ret->mId = mLastArenaId++;
-  mArenas.Insert(ret);
+  (aIsPrivate ? mPrivateArenas : mArenas).Insert(ret);
   return ret;
 }
 
 // End arena.
 // ***************************************************************************
 // Begin general internal functions.
 
 static void*
@@ -4727,59 +4765,59 @@ MozJemalloc::jemalloc_free_dirty_pages(v
     for (auto arena : gArenas.iter()) {
       MutexAutoLock arena_lock(arena->mLock);
       arena->Purge(true);
     }
   }
 }
 
 inline arena_t*
-ArenaCollection::GetById(arena_id_t aArenaId)
+ArenaCollection::GetById(arena_id_t aArenaId, bool aIsPrivate)
 {
   if (!malloc_initialized) {
     return nullptr;
   }
   // Use AlignedStorage2 to avoid running the arena_t constructor, while
   // we only need it as a placeholder for mId.
   mozilla::AlignedStorage2<arena_t> key;
   key.addr()->mId = aArenaId;
   MutexAutoLock lock(mLock);
-  arena_t* result = mArenas.Search(key.addr());
+  arena_t* result = (aIsPrivate ? mPrivateArenas : mArenas).Search(key.addr());
   MOZ_RELEASE_ASSERT(result);
   return result;
 }
 
 #ifdef NIGHTLY_BUILD
 template<>
 inline arena_id_t
 MozJemalloc::moz_create_arena()
 {
   if (malloc_init()) {
-    arena_t* arena = gArenas.CreateArena();
+    arena_t* arena = gArenas.CreateArena(/* IsPrivate = */ true);
     return arena->mId;
   }
   return 0;
 }
 
 template<>
 inline void
 MozJemalloc::moz_dispose_arena(arena_id_t aArenaId)
 {
-  arena_t* arena = gArenas.GetById(aArenaId);
+  arena_t* arena = gArenas.GetById(aArenaId, /* IsPrivate = */ true);
   if (arena) {
     gArenas.DisposeArena(arena);
   }
 }
 
 #define MALLOC_DECL(name, return_type, ...)                                    \
   template<>                                                                   \
   inline return_type MozJemalloc::moz_arena_##name(                            \
     arena_id_t aArenaId, ARGS_HELPER(TYPED_ARGS, ##__VA_ARGS__))               \
   {                                                                            \
-    BaseAllocator allocator(gArenas.GetById(aArenaId));           \
+    BaseAllocator allocator(gArenas.GetById(aArenaId, /* IsPrivate = */ true));\
     return allocator.name(ARGS_HELPER(ARGS, ##__VA_ARGS__));                   \
   }
 #define MALLOC_FUNCS MALLOC_FUNCS_MALLOC_BASE
 #include "malloc_decls.h"
 
 #else
 
 #define MALLOC_DECL(name, return_type, ...)                                    \
--- a/memory/build/rb.h
+++ b/memory/build/rb.h
@@ -173,17 +173,16 @@ public:
     Insert(reinterpret_cast<TreeNode*>(aNode));
   }
 
   void Remove(T* aNode)
   {
     return Remove(reinterpret_cast<TreeNode*>(aNode));
   }
 
-private:
   /* Helper class to avoid having all the tree traversal code further below
    * have to use Trait::GetTreeNode, adding visual noise. */
   struct TreeNode : public T
   {
     TreeNode* Left()
     {
       return (TreeNode*)Trait::GetTreeNode(this).Left();
     }
@@ -219,16 +218,17 @@ private:
     }
 
     void SetColor(NodeColor aColor)
     {
       Trait::GetTreeNode(this).SetColor(aColor);
     }
   };
 
+private:
   TreeNode* mRoot;
 
   TreeNode* First(TreeNode* aStart)
   {
     TreeNode* ret;
     for (ret = aStart ? aStart : mRoot; ret && ret->Left(); ret = ret->Left()) {
     }
     return ret;
@@ -728,16 +728,17 @@ public:
         TreeNode* node;
         mPath[mDepth++] = aTree->mRoot;
         while ((node = mPath[mDepth - 1]->Left())) {
           mPath[mDepth++] = node;
         }
       }
     }
 
+    template<typename Iterator>
     class Item
     {
       Iterator* mIterator;
       T* mItem;
 
     public:
       Item(Iterator* aIterator, T* aItem)
         : mIterator(aIterator)
@@ -753,24 +754,24 @@ public:
 
       const Item& operator++()
       {
         mItem = mIterator->Next();
         return *this;
       }
     };
 
-    Item begin()
+    Item<Iterator> begin()
     {
-      return Item(this, mDepth > 0 ? mPath[mDepth - 1] : nullptr);
+      return Item<Iterator>(this, mDepth > 0 ? mPath[mDepth - 1] : nullptr);
     }
 
-    Item end()
+    Item<Iterator> end()
     {
-      return Item(this, nullptr);
+      return Item<Iterator>(this, nullptr);
     }
 
     TreeNode* Next()
     {
       TreeNode* node;
       if ((node = mPath[mDepth - 1]->Right())) {
         /* The successor is the left-most node in the right subtree. */
         mPath[mDepth++] = node;