Bug 1403444 - Remove typedefs for RedBlackTrees. r?njn draft
author Mike Hommey <mh+mozilla@glandium.org>
Thu, 28 Sep 2017 06:58:37 +0900
changeset 671634 6c996be9a07f4e4568edaf210e5436b4bfe4a76e
parent 671633 d22176c1c2ac2a0a5d07c8c59ee16e107ca20fd6
child 733569 beb3dfda7beab2c07d40aa768ee7dd3717b120d4
push id 81993
push user bmo:mh+mozilla@glandium.org
push date Thu, 28 Sep 2017 04:40:59 +0000
reviewers njn
bugs 1403444
milestone 58.0a1
Bug 1403444 - Remove typedefs for RedBlackTrees. r?njn
memory/build/mozjemalloc.cpp
--- a/memory/build/mozjemalloc.cpp
+++ b/memory/build/mozjemalloc.cpp
@@ -784,19 +784,16 @@ struct ArenaAvailTreeTrait : public Aren
   {
     size_t size1 = aNode->bits & ~pagesize_mask;
     size_t size2 = aOther->bits & ~pagesize_mask;
     int ret = (size1 > size2) - (size1 < size2);
     return ret ? ret : CompareAddr((aNode->bits & CHUNK_MAP_KEY) ? nullptr : aNode, aOther);
   }
 };
 
-typedef RedBlackTree<arena_chunk_map_t, ArenaAvailTreeTrait> arena_avail_tree_t;
-typedef RedBlackTree<arena_chunk_map_t, ArenaRunTreeTrait> arena_run_tree_t;
-
 /* Arena chunk header. */
 struct arena_chunk_t {
 	/* Arena that owns the chunk. */
 	arena_t		*arena;
 
 	/* Linkage for the arena's tree of dirty chunks. */
 	RedBlackTreeNode<arena_chunk_t> link_dirty;
 
@@ -827,18 +824,16 @@ struct ArenaDirtyChunkTrait
   static inline int Compare(arena_chunk_t* aNode, arena_chunk_t* aOther)
   {
     MOZ_ASSERT(aNode);
     MOZ_ASSERT(aOther);
     return CompareAddr(aNode, aOther);
   }
 };
 
-typedef RedBlackTree<arena_chunk_t, ArenaDirtyChunkTrait> arena_chunk_tree_t;
-
 #ifdef MALLOC_DOUBLE_PURGE
 namespace mozilla {
 
 template<>
 struct GetDoublyLinkedListElement<arena_chunk_t>
 {
   static DoublyLinkedListElement<arena_chunk_t>& Get(arena_chunk_t* aThis)
   {
@@ -877,17 +872,17 @@ struct arena_bin_t {
 
 	/*
 	 * Tree of non-full runs.  This tree is used when looking for an
 	 * existing run when runcur is no longer usable.  We choose the
 	 * non-full run that is lowest in memory; this policy tends to keep
 	 * objects packed well, and it can also help reduce the number of
 	 * almost-empty chunks.
 	 */
-	arena_run_tree_t runs;
+	RedBlackTree<arena_chunk_map_t, ArenaRunTreeTrait> runs;
 
 	/* Size of regions in a run for this bin's size class. */
 	size_t		reg_size;
 
 	/* Total size of a run for this bin's size class. */
 	size_t		run_size;
 
 	/* Total number of regions in a run for this bin's size class. */
@@ -915,17 +910,17 @@ struct arena_t {
 
   /* All operations on this arena require that lock be locked. */
   malloc_spinlock_t mLock;
 
   arena_stats_t mStats;
 
 private:
   /* Tree of dirty-page-containing chunks this arena manages. */
-  arena_chunk_tree_t mChunksDirty;
+  RedBlackTree<arena_chunk_t, ArenaDirtyChunkTrait> mChunksDirty;
 
 #ifdef MALLOC_DOUBLE_PURGE
   /* Head of a linked list of MADV_FREE'd-page-containing chunks this
    * arena manages. */
   mozilla::DoublyLinkedList<arena_chunk_t> mChunksMAdvised;
 #endif
 
   /*
@@ -953,17 +948,17 @@ public:
    */
   size_t mMaxDirty;
 
 private:
   /*
    * Size/address-ordered tree of this arena's available runs.  This tree
    * is used for first-best-fit run allocation.
    */
-  arena_avail_tree_t mRunsAvail;
+  RedBlackTree<arena_chunk_map_t, ArenaAvailTreeTrait> mRunsAvail;
 
 public:
   /*
    * mBins is used to store rings of free regions of the following sizes,
    * assuming a 16-byte quantum, 4kB pagesize, and default MALLOC_OPTIONS.
    *
    *   mBins[i] | size |
    *   --------+------+
@@ -1043,18 +1038,16 @@ struct ArenaTreeTrait
   static inline int Compare(arena_t* aNode, arena_t* aOther)
   {
     MOZ_ASSERT(aNode);
     MOZ_ASSERT(aOther);
     return (aNode->mId > aOther->mId) - (aNode->mId < aOther->mId);
   }
 };
 
-typedef RedBlackTree<arena_t, ArenaTreeTrait> arena_tree_t;
-
 /********/
 /*
  * Chunks.
  */
 
 static malloc_rtree_t *chunk_rtree;
 
 /* Protects chunk-related data structures. */
@@ -1108,17 +1101,17 @@ static size_t		base_committed;
 /*
  * Arenas that are used to service external requests.  Not all elements of the
  * arenas array are necessarily used; arenas are created lazily as needed.
  */
 static arena_t** arenas;
 // A tree of arenas, arranged by id.
 // TODO: Move into arena_t as a static member when rb_tree doesn't depend on
 // the type being defined anymore.
-static arena_tree_t gArenaTree;
+static RedBlackTree<arena_t, ArenaTreeTrait> gArenaTree;
 static unsigned narenas;
 static malloc_spinlock_t arenas_lock; /* Protects arenas initialization. */
 
 #ifndef NO_TLS
 /*
  * The arena associated with the current thread (per jemalloc_thread_local_arena)
  * On OSX, __thread/thread_local circles back calling malloc to allocate storage
  * on first access on each thread, which leads to an infinite loop, but