Bug 1286948 - Renames Profiling to Instrument for prolog/epilogue. r=luke draft
authorYury Delendik <ydelendik@mozilla.com>
Fri, 02 Dec 2016 08:47:01 -0600
changeset 447069 9e4f6c346307a6f7851b0c1ade6294d4e7dc17f3
parent 447006 f65ad27efe839ce9df0283840a1a40b4bbc9ead0
child 447070 6260f56105ff87f6d948dcf9307ed0011bcd9153
push id37975
push userydelendik@mozilla.com
push dateFri, 02 Dec 2016 15:57:54 +0000
reviewersluke
bugs1286948
milestone53.0a1
Bug 1286948 - Renames Profiling to Instrument for prolog/epilogue. r=luke We will use the same prolog/epilogue to track frame pointer and return address for profiling and during debugging. Reflecting the same in the code and comments by using "instrumentation" vs "profile". MozReview-Commit-ID: Krir2qcevKX
js/src/wasm/WasmCode.cpp
js/src/wasm/WasmCode.h
js/src/wasm/WasmFrameIterator.cpp
js/src/wasm/WasmFrameIterator.h
js/src/wasm/WasmGenerator.cpp
js/src/wasm/WasmGenerator.h
js/src/wasm/WasmInstance.cpp
js/src/wasm/WasmModule.cpp
js/src/wasm/WasmStubs.cpp
js/src/wasm/WasmStubs.h
js/src/wasm/WasmTypes.h
--- a/js/src/wasm/WasmCode.cpp
+++ b/js/src/wasm/WasmCode.cpp
@@ -334,69 +334,69 @@ FuncImport::deserialize(const uint8_t* c
 size_t
 FuncImport::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
 {
     return sig_.sizeOfExcludingThis(mallocSizeOf);
 }
 
 CodeRange::CodeRange(Kind kind, Offsets offsets)
   : begin_(offsets.begin),
-    profilingReturn_(0),
+    instrumentReturn_(0),
     end_(offsets.end),
     funcIndex_(0),
     funcLineOrBytecode_(0),
     funcBeginToTableEntry_(0),
-    funcBeginToTableProfilingJump_(0),
-    funcBeginToNonProfilingEntry_(0),
-    funcProfilingJumpToProfilingReturn_(0),
-    funcProfilingEpilogueToProfilingReturn_(0),
+    funcBeginToTableInstrumentJump_(0),
+    funcBeginToNonInstrumentEntry_(0),
+    funcInstrumentJumpToInstrumentReturn_(0),
+    funcInstrumentEpilogueToInstrumentReturn_(0),
     kind_(kind)
 {
     MOZ_ASSERT(begin_ <= end_);
     MOZ_ASSERT(kind_ == Entry || kind_ == Inline || kind_ == FarJumpIsland);
 }
 
-CodeRange::CodeRange(Kind kind, ProfilingOffsets offsets)
+CodeRange::CodeRange(Kind kind, InstrumentOffsets offsets)
   : begin_(offsets.begin),
-    profilingReturn_(offsets.profilingReturn),
+    instrumentReturn_(offsets.instrumentReturn),
     end_(offsets.end),
     funcIndex_(0),
     funcLineOrBytecode_(0),
     funcBeginToTableEntry_(0),
-    funcBeginToTableProfilingJump_(0),
-    funcBeginToNonProfilingEntry_(0),
-    funcProfilingJumpToProfilingReturn_(0),
-    funcProfilingEpilogueToProfilingReturn_(0),
+    funcBeginToTableInstrumentJump_(0),
+    funcBeginToNonInstrumentEntry_(0),
+    funcInstrumentJumpToInstrumentReturn_(0),
+    funcInstrumentEpilogueToInstrumentReturn_(0),
     kind_(kind)
 {
-    MOZ_ASSERT(begin_ < profilingReturn_);
-    MOZ_ASSERT(profilingReturn_ < end_);
+    MOZ_ASSERT(begin_ < instrumentReturn_);
+    MOZ_ASSERT(instrumentReturn_ < end_);
     MOZ_ASSERT(kind_ == ImportJitExit || kind_ == ImportInterpExit || kind_ == TrapExit);
 }
 
 CodeRange::CodeRange(uint32_t funcIndex, uint32_t funcLineOrBytecode, FuncOffsets offsets)
   : begin_(offsets.begin),
-    profilingReturn_(offsets.profilingReturn),
+    instrumentReturn_(offsets.instrumentReturn),
     end_(offsets.end),
     funcIndex_(funcIndex),
     funcLineOrBytecode_(funcLineOrBytecode),
     funcBeginToTableEntry_(offsets.tableEntry - begin_),
-    funcBeginToTableProfilingJump_(offsets.tableProfilingJump - begin_),
-    funcBeginToNonProfilingEntry_(offsets.nonProfilingEntry - begin_),
-    funcProfilingJumpToProfilingReturn_(profilingReturn_ - offsets.profilingJump),
-    funcProfilingEpilogueToProfilingReturn_(profilingReturn_ - offsets.profilingEpilogue),
+    funcBeginToTableInstrumentJump_(offsets.tableInstrumentJump - begin_),
+    funcBeginToNonInstrumentEntry_(offsets.nonInstrumentEntry - begin_),
+    funcInstrumentJumpToInstrumentReturn_(instrumentReturn_ - offsets.instrumentJump),
+    funcInstrumentEpilogueToInstrumentReturn_(instrumentReturn_ - offsets.instrumentEpilogue),
     kind_(Function)
 {
-    MOZ_ASSERT(begin_ < profilingReturn_);
-    MOZ_ASSERT(profilingReturn_ < end_);
+    MOZ_ASSERT(begin_ < instrumentReturn_);
+    MOZ_ASSERT(instrumentReturn_ < end_);
     MOZ_ASSERT(offsets.tableEntry - begin_ <= UINT8_MAX);
-    MOZ_ASSERT(offsets.tableProfilingJump - begin_ <= UINT8_MAX);
-    MOZ_ASSERT(offsets.nonProfilingEntry - begin_ <= UINT8_MAX);
-    MOZ_ASSERT(profilingReturn_ - offsets.profilingJump <= UINT8_MAX);
-    MOZ_ASSERT(profilingReturn_ - offsets.profilingEpilogue <= UINT8_MAX);
+    MOZ_ASSERT(offsets.tableInstrumentJump - begin_ <= UINT8_MAX);
+    MOZ_ASSERT(offsets.nonInstrumentEntry - begin_ <= UINT8_MAX);
+    MOZ_ASSERT(instrumentReturn_ - offsets.instrumentJump <= UINT8_MAX);
+    MOZ_ASSERT(instrumentReturn_ - offsets.instrumentEpilogue <= UINT8_MAX);
 }
 
 static size_t
 StringLengthWithNullChar(const char* chars)
 {
     return chars ? strlen(chars) + 1 : 0;
 }
 
@@ -586,16 +586,17 @@ Metadata::getFuncName(JSContext* cx, con
 }
 
 Code::Code(UniqueCodeSegment segment,
            const Metadata& metadata,
            const ShareableBytes* maybeBytecode)
   : segment_(Move(segment)),
     metadata_(&metadata),
     maybeBytecode_(maybeBytecode),
+    instrumentModeCounter_(0),
     profilingEnabled_(false)
 {}
 
 struct CallSiteRetAddrOffset
 {
     const CallSiteVector& callSites;
     explicit CallSiteRetAddrOffset(const CallSiteVector& callSites) : callSites(callSites) {}
     uint32_t operator[](size_t index) const {
@@ -811,34 +812,44 @@ Code::ensureProfilingState(JSContext* cx
         }
     } else {
         funcLabels_.clear();
     }
 
     // Only mutate the code after the fallible operations are complete to avoid
     // the need to rollback.
     profilingEnabled_ = newProfilingEnabled;
-
-    {
-        AutoWritableJitCode awjc(cx->runtime(), segment_->base(), segment_->codeLength());
-        AutoFlushICache afc("Code::ensureProfilingState");
-        AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->codeLength());
-
-        for (const CallSite& callSite : metadata_->callSites)
-            ToggleProfiling(*this, callSite, newProfilingEnabled);
-        for (const CallThunk& callThunk : metadata_->callThunks)
-            ToggleProfiling(*this, callThunk, newProfilingEnabled);
-        for (const CodeRange& codeRange : metadata_->codeRanges)
-            ToggleProfiling(*this, codeRange, newProfilingEnabled);
-    }
-
+    adjustInstrumentMode(cx, newProfilingEnabled ? 1 : -1);
     return true;
 }
 
 void
+Code::adjustInstrumentMode(JSContext* cx, int32_t delta)
+{
+    MOZ_ASSERT_IF(delta < 0, instrumentModeCounter_ >= (uint32_t)-delta);
+    bool wasInstrumentModeEnabled = instrumentModeCounter_ > 0;
+    instrumentModeCounter_ += delta;
+    bool instrumentModeEnabled = instrumentModeCounter_ > 0;
+    if (wasInstrumentModeEnabled == instrumentModeEnabled)
+        return;
+
+    AutoWritableJitCode awjc(cx->runtime(), segment_->base(), segment_->codeLength());
+    AutoFlushICache afc("Code::adjustInstrumentMode");
+    AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->codeLength());
+
+    for (const CallSite& callSite : metadata_->callSites)
+        ToggleInstrument(*this, callSite, instrumentModeEnabled);
+    for (const CallThunk& callThunk : metadata_->callThunks)
+        ToggleInstrument(*this, callThunk, instrumentModeEnabled);
+    for (const CodeRange& codeRange : metadata_->codeRanges)
+        ToggleInstrument(*this, codeRange, instrumentModeEnabled);
+}
+
+
+void
 Code::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                     Metadata::SeenSet* seenMetadata,
                     ShareableBytes::SeenSet* seenBytes,
                     size_t* code,
                     size_t* data) const
 {
     *code += segment_->codeLength();
     *data += mallocSizeOf(this) +
--- a/js/src/wasm/WasmCode.h
+++ b/js/src/wasm/WasmCode.h
@@ -51,19 +51,16 @@ class CodeSegment
     uint32_t globalDataLength_;
 
     // These are pointers into code for stubs used for asynchronous
     // signal-handler control-flow transfer.
     uint8_t* interruptCode_;
     uint8_t* outOfBoundsCode_;
     uint8_t* unalignedAccessCode_;
 
-    // The profiling mode may be changed dynamically.
-    bool profilingEnabled_;
-
     CodeSegment() { PodZero(this); }
     template <class> friend struct js::MallocProvider;
 
     CodeSegment(const CodeSegment&) = delete;
     CodeSegment(CodeSegment&&) = delete;
     void operator=(const CodeSegment&) = delete;
     void operator=(CodeSegment&&) = delete;
 
@@ -243,31 +240,31 @@ class CodeRange
         FarJumpIsland,     // inserted to connect otherwise out-of-range insns
         Inline             // stub that is jumped-to, not called, and thus
                            // replaces/loses preceding innermost frame
     };
 
   private:
     // All fields are treated as cacheable POD:
     uint32_t begin_;
-    uint32_t profilingReturn_;
+    uint32_t instrumentReturn_;
     uint32_t end_;
     uint32_t funcIndex_;
     uint32_t funcLineOrBytecode_;
     uint8_t funcBeginToTableEntry_;
-    uint8_t funcBeginToTableProfilingJump_;
-    uint8_t funcBeginToNonProfilingEntry_;
-    uint8_t funcProfilingJumpToProfilingReturn_;
-    uint8_t funcProfilingEpilogueToProfilingReturn_;
+    uint8_t funcBeginToTableInstrumentJump_;
+    uint8_t funcBeginToNonInstrumentEntry_;
+    uint8_t funcInstrumentJumpToInstrumentReturn_;
+    uint8_t funcInstrumentEpilogueToInstrumentReturn_;
     Kind kind_ : 8;
 
   public:
     CodeRange() = default;
     CodeRange(Kind kind, Offsets offsets);
-    CodeRange(Kind kind, ProfilingOffsets offsets);
+    CodeRange(Kind kind, InstrumentOffsets offsets);
     CodeRange(uint32_t funcIndex, uint32_t lineOrBytecode, FuncOffsets offsets);
 
     // All CodeRanges have a begin and end.
 
     uint32_t begin() const {
         return begin_;
     }
     uint32_t end() const {
@@ -288,50 +285,51 @@ class CodeRange
     }
     bool isTrapExit() const {
         return kind() == TrapExit;
     }
     bool isInline() const {
         return kind() == Inline;
     }
 
-    // Every CodeRange except entry and inline stubs has a profiling return
-    // which is used for asynchronous profiling to determine the frame pointer.
+    // Every CodeRange except entry and inline stubs has an instrumentation
+    // return which is used for asynchronous instrumentation to determine
+    // the frame pointer.
 
-    uint32_t profilingReturn() const {
+    uint32_t instrumentReturn() const {
         MOZ_ASSERT(isFunction() || isImportExit() || isTrapExit());
-        return profilingReturn_;
+        return instrumentReturn_;
     }
 
     // Functions have offsets which allow patching to selectively execute
-    // profiling prologues/epilogues.
+    // instrumentation prologues/epilogues.
 
-    uint32_t funcProfilingEntry() const {
+    uint32_t funcInstrumentEntry() const {
         MOZ_ASSERT(isFunction());
         return begin();
     }
     uint32_t funcTableEntry() const {
         MOZ_ASSERT(isFunction());
         return begin_ + funcBeginToTableEntry_;
     }
-    uint32_t funcTableProfilingJump() const {
+    uint32_t funcTableInstrumentJump() const {
         MOZ_ASSERT(isFunction());
-        return begin_ + funcBeginToTableProfilingJump_;
+        return begin_ + funcBeginToTableInstrumentJump_;
     }
-    uint32_t funcNonProfilingEntry() const {
+    uint32_t funcNonInstrumentEntry() const {
         MOZ_ASSERT(isFunction());
-        return begin_ + funcBeginToNonProfilingEntry_;
+        return begin_ + funcBeginToNonInstrumentEntry_;
     }
-    uint32_t funcProfilingJump() const {
+    uint32_t funcInstrumentJump() const {
         MOZ_ASSERT(isFunction());
-        return profilingReturn_ - funcProfilingJumpToProfilingReturn_;
+        return instrumentReturn_ - funcInstrumentJumpToInstrumentReturn_;
     }
-    uint32_t funcProfilingEpilogue() const {
+    uint32_t funcInstrumentEpilogue() const {
         MOZ_ASSERT(isFunction());
-        return profilingReturn_ - funcProfilingEpilogueToProfilingReturn_;
+        return instrumentReturn_ - funcInstrumentEpilogueToInstrumentReturn_;
     }
     uint32_t funcIndex() const {
         MOZ_ASSERT(isFunction());
         return funcIndex_;
     }
     uint32_t funcLineOrBytecode() const {
         MOZ_ASSERT(isFunction());
         return funcLineOrBytecode_;
@@ -349,18 +347,18 @@ class CodeRange
             return offset < rhs.begin();
         }
     };
 };
 
 WASM_DECLARE_POD_VECTOR(CodeRange, CodeRangeVector)
 
 // A CallThunk describes the offset and target of thunks so that they may be
-// patched at runtime when profiling is toggled. Thunks are emitted to connect
-// callsites that are too far away from callees to fit in a single call
+// patched at runtime when instrumentation is toggled. Thunks are emitted to
+// connect callsites that are too far away from callees to fit in a single call
 // instruction's relative offset.
 
 struct CallThunk
 {
     uint32_t offset;
     union {
         uint32_t funcIndex;
         uint32_t codeRangeIndex;
@@ -547,16 +545,17 @@ typedef UniquePtr<GeneratedSourceMap> Un
 
 class Code
 {
     const UniqueCodeSegment  segment_;
     const SharedMetadata     metadata_;
     const SharedBytes        maybeBytecode_;
     UniqueGeneratedSourceMap maybeSourceMap_;
     CacheableCharsVector     funcLabels_;
+    uint32_t                 instrumentModeCounter_;
     bool                     profilingEnabled_;
 
   public:
     Code(UniqueCodeSegment segment,
          const Metadata& metadata,
          const ShareableBytes* maybeBytecode);
 
     CodeSegment& segment() { return *segment_; }
@@ -596,16 +595,19 @@ class Code
 
     void addSizeOfMisc(MallocSizeOf mallocSizeOf,
                        Metadata::SeenSet* seenMetadata,
                        ShareableBytes::SeenSet* seenBytes,
                        size_t* code,
                        size_t* data) const;
 
     WASM_DECLARE_SERIALIZABLE(Code);
+
+private:
+    void adjustInstrumentMode(JSContext* cx, int32_t delta);
 };
 
 typedef UniquePtr<Code> UniqueCode;
 
 } // namespace wasm
 } // namespace js
 
 #endif // wasm_code_h
--- a/js/src/wasm/WasmFrameIterator.cpp
+++ b/js/src/wasm/WasmFrameIterator.cpp
@@ -265,18 +265,18 @@ PushRetAddr(MacroAssembler& masm)
     // The x86/x64 call instruction pushes the return address.
 #endif
 }
 
 // Generate a prologue that maintains WasmActivation::fp as the virtual frame
 // pointer so that ProfilingFrameIterator can walk the stack at any pc in
 // generated code.
 static void
-GenerateProfilingPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                          ProfilingOffsets* offsets)
+GenerateInstrumentPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
+                           InstrumentOffsets* offsets)
 {
     Register scratch = ABINonArgReg0;
 
     // ProfilingFrameIterator needs to know the offsets of several key
     // instructions from entry. To save space, we make these offsets static
     // constants and assert that they match the actual codegen below. On ARM,
     // this requires AutoForbidPools to prevent a constant pool from being
     // randomly inserted between two instructions.
@@ -300,20 +300,20 @@ GenerateProfilingPrologue(MacroAssembler
 
     if (reason != ExitReason::None)
         masm.store32(Imm32(int32_t(reason)), Address(scratch, WasmActivation::offsetOfExitReason()));
 
     if (framePushed)
         masm.subFromStackPtr(Imm32(framePushed));
 }
 
-// Generate the inverse of GenerateProfilingPrologue.
+// Generate the inverse of GenerateInstrumentPrologue.
 static void
-GenerateProfilingEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                          ProfilingOffsets* offsets)
+GenerateInstrumentEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
+                           InstrumentOffsets* offsets)
 {
     Register scratch = ABINonArgReturnReg0;
 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || \
     defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
     Register scratch2 = ABINonArgReturnReg1;
 #endif
 
     if (framePushed)
@@ -322,18 +322,18 @@ GenerateProfilingEpilogue(MacroAssembler
     masm.loadWasmActivationFromSymbolicAddress(scratch);
 
     if (reason != ExitReason::None) {
         masm.store32(Imm32(int32_t(ExitReason::None)),
                      Address(scratch, WasmActivation::offsetOfExitReason()));
     }
 
     // ProfilingFrameIterator assumes fixed offsets of the last few
-    // instructions from profilingReturn, so AutoForbidPools to ensure that
-    // unintended instructions are not automatically inserted.
+    // instructions from instrumentReturn, so AutoForbidPools to ensure
+    // that unintended instructions are not automatically inserted.
     {
 #if defined(JS_CODEGEN_ARM)
         AutoForbidPools afp(&masm, /* number of instructions in scope = */ 4);
 #endif
 
         // sp protects the stack from clobber via asynchronous signal handlers
         // and the async interrupt exit. Since activation.fp can be read at any
         // time and still points to the current frame, be careful to only update
@@ -345,41 +345,42 @@ GenerateProfilingEpilogue(MacroAssembler
         DebugOnly<uint32_t> prePop = masm.currentOffset();
         masm.addToStackPtr(Imm32(sizeof(void *)));
         MOZ_ASSERT_IF(!masm.oom(), PostStorePrePopFP == masm.currentOffset() - prePop);
 #else
         masm.pop(Address(scratch, WasmActivation::offsetOfFP()));
         MOZ_ASSERT(PostStorePrePopFP == 0);
 #endif
 
-        offsets->profilingReturn = masm.currentOffset();
+        offsets->instrumentReturn = masm.currentOffset();
         masm.ret();
     }
 }
 
-// In profiling mode, we need to maintain fp so that we can unwind the stack at
-// any pc. In non-profiling mode, the only way to observe WasmActivation::fp is
-// to call out to C++ so, as an optimization, we don't update fp. To avoid
-// recompilation when the profiling mode is toggled, we generate both prologues
-// a priori and switch between prologues when the profiling mode is toggled.
-// Specifically, ToggleProfiling patches all callsites to either call the
-// profiling or non-profiling entry point.
+// In instrumentation mode, we need to maintain fp so that we can unwind
+// the stack at any pc. In non-instrumentation mode, the only way to observe
+// WasmActivation::fp is to call out to C++ so, as an optimization, we don't
+// update fp. To avoid recompilation when the instrumentation mode is toggled,
+// we generate both prologues a priori and switch between prologues when
+// the instrumentation mode is toggled. Specifically, ToggleInstrument patches
+// all callsites to either call the instrumentation or non-instrumentation entry
+// point.
 void
 wasm::GenerateFunctionPrologue(MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
                                FuncOffsets* offsets)
 {
 #if defined(JS_CODEGEN_ARM)
     // Flush pending pools so they do not get dumped between the 'begin' and
     // 'entry' offsets since the difference must be less than UINT8_MAX.
     masm.flushBuffer();
 #endif
 
     masm.haltingAlign(CodeAlignment);
 
-    GenerateProfilingPrologue(masm, framePushed, ExitReason::None, offsets);
+    GenerateInstrumentPrologue(masm, framePushed, ExitReason::None, offsets);
     Label body;
     masm.jump(&body);
 
     // Generate table entry thunk:
     masm.haltingAlign(CodeAlignment);
     offsets->tableEntry = masm.currentOffset();
     TrapOffset trapOffset(0);  // ignored by masm.wasmEmitTrapOutOfLineCode
     TrapDesc trap(trapOffset, Trap::IndirectCallBadSig, masm.framePushed());
@@ -391,76 +392,76 @@ wasm::GenerateFunctionPrologue(MacroAsse
         break;
       }
       case SigIdDesc::Kind::Immediate:
         masm.branch32(Assembler::Condition::NotEqual, WasmTableCallSigReg, Imm32(sigId.immediate()), trap);
         break;
       case SigIdDesc::Kind::None:
         break;
     }
-    offsets->tableProfilingJump = masm.nopPatchableToNearJump().offset();
+    offsets->tableInstrumentJump = masm.nopPatchableToNearJump().offset();
 
     // Generate normal prologue:
     masm.nopAlign(CodeAlignment);
-    offsets->nonProfilingEntry = masm.currentOffset();
+    offsets->nonInstrumentEntry = masm.currentOffset();
     PushRetAddr(masm);
     masm.subFromStackPtr(Imm32(framePushed + FrameBytesAfterReturnAddress));
 
     // Prologue join point, body begin:
     masm.bind(&body);
     masm.setFramePushed(framePushed);
 }
 
-// Similar to GenerateFunctionPrologue (see comment), we generate both a
-// profiling and non-profiling epilogue a priori. When the profiling mode is
-// toggled, ToggleProfiling patches the 'profiling jump' to either be a nop
-// (falling through to the normal prologue) or a jump (jumping to the profiling
-// epilogue).
+// Similar to GenerateFunctionPrologue (see comment), we generate both
+// an instrumentation and non-instrumentation epilogue a priori. When
+// the instrumentation mode is toggled, ToggleInstrument patches
+// the 'instrumentation jump' to either be a nop (falling through to
+// the normal epilogue) or a jump (jumping to the instrumentation epilogue).
 void
 wasm::GenerateFunctionEpilogue(MacroAssembler& masm, unsigned framePushed, FuncOffsets* offsets)
 {
     MOZ_ASSERT(masm.framePushed() == framePushed);
 
 #if defined(JS_CODEGEN_ARM)
-    // Flush pending pools so they do not get dumped between the profilingReturn
-    // and profilingJump/profilingEpilogue offsets since the difference must be
-    // less than UINT8_MAX.
+    // Flush pending pools so they do not get dumped between
+    // the instrumentReturn and instrumentJump/instrumentEpilogue
+    // offsets since the difference must be less than UINT8_MAX.
     masm.flushBuffer();
 #endif
 
-    // Generate a nop that is overwritten by a jump to the profiling epilogue
-    // when profiling is enabled.
-    offsets->profilingJump = masm.nopPatchableToNearJump().offset();
+    // Generate a nop that is overwritten by a jump to the instrumentation
+    // epilogue when instrumentation is enabled.
+    offsets->instrumentJump = masm.nopPatchableToNearJump().offset();
 
     // Normal epilogue:
     masm.addToStackPtr(Imm32(framePushed + FrameBytesAfterReturnAddress));
     masm.ret();
     masm.setFramePushed(0);
 
-    // Profiling epilogue:
-    offsets->profilingEpilogue = masm.currentOffset();
-    GenerateProfilingEpilogue(masm, framePushed, ExitReason::None, offsets);
+    // Instrumentation epilogue:
+    offsets->instrumentEpilogue = masm.currentOffset();
+    GenerateInstrumentEpilogue(masm, framePushed, ExitReason::None, offsets);
 }
 
 void
 wasm::GenerateExitPrologue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                           ProfilingOffsets* offsets)
+                           InstrumentOffsets* offsets)
 {
     masm.haltingAlign(CodeAlignment);
-    GenerateProfilingPrologue(masm, framePushed, reason, offsets);
+    GenerateInstrumentPrologue(masm, framePushed, reason, offsets);
     masm.setFramePushed(framePushed);
 }
 
 void
 wasm::GenerateExitEpilogue(MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                           ProfilingOffsets* offsets)
+                           InstrumentOffsets* offsets)
 {
     // Inverse of GenerateExitPrologue:
     MOZ_ASSERT(masm.framePushed() == framePushed);
-    GenerateProfilingEpilogue(masm, framePushed, reason, offsets);
+    GenerateInstrumentEpilogue(masm, framePushed, reason, offsets);
     masm.setFramePushed(0);
 }
 
 /*****************************************************************************/
 // ProfilingFrameIterator
 
 ProfilingFrameIterator::ProfilingFrameIterator()
   : activation_(nullptr),
@@ -584,17 +585,17 @@ typedef JS::ProfilingFrameIterator::Regi
 static bool
 InThunk(const CodeRange& codeRange, uint32_t offsetInModule)
 {
     if (codeRange.kind() == CodeRange::FarJumpIsland)
         return true;
 
     return codeRange.isFunction() &&
            offsetInModule >= codeRange.funcTableEntry() &&
-           offsetInModule < codeRange.funcNonProfilingEntry();
+           offsetInModule < codeRange.funcNonInstrumentEntry();
 }
 
 ProfilingFrameIterator::ProfilingFrameIterator(const WasmActivation& activation,
                                                const RegisterState& state)
   : activation_(&activation),
     code_(nullptr),
     codeRange_(nullptr),
     callerFP_(nullptr),
@@ -644,25 +645,25 @@ ProfilingFrameIterator::ProfilingFrameIt
         void** sp = (void**)state.sp;
 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
         if (offsetInCodeRange < PushedRetAddr || InThunk(*codeRange, offsetInModule)) {
             // First instruction of the ARM/MIPS function; the return address is
             // still in lr and fp still holds the caller's fp.
             callerPC_ = state.lr;
             callerFP_ = fp;
             AssertMatchesCallSite(*activation_, callerPC_, callerFP_, sp - 2);
-        } else if (offsetInModule == codeRange->profilingReturn() - PostStorePrePopFP) {
+        } else if (offsetInModule == codeRange->instrumentReturn() - PostStorePrePopFP) {
             // Second-to-last instruction of the ARM/MIPS function; fp points to
             // the caller's fp; have not yet popped Frame.
             callerPC_ = ReturnAddressFromFP(sp);
             callerFP_ = CallerFPFromFP(sp);
             AssertMatchesCallSite(*activation_, callerPC_, callerFP_, sp);
         } else
 #endif
-        if (offsetInCodeRange < PushedFP || offsetInModule == codeRange->profilingReturn() ||
+        if (offsetInCodeRange < PushedFP || offsetInModule == codeRange->instrumentReturn() ||
             InThunk(*codeRange, offsetInModule))
         {
             // The return address has been pushed on the stack but not fp; fp
             // still points to the caller's fp.
             callerPC_ = *sp;
             callerFP_ = fp;
             AssertMatchesCallSite(*activation_, callerPC_, callerFP_, sp - 1);
         } else if (offsetInCodeRange < StoredFP) {
@@ -797,17 +798,17 @@ ProfilingFrameIterator::label() const
 
     MOZ_CRASH("bad code range kind");
 }
 
 /*****************************************************************************/
 // Runtime patching to enable/disable profiling
 
 void
-wasm::ToggleProfiling(const Code& code, const CallSite& callSite, bool enabled)
+wasm::ToggleInstrument(const Code& code, const CallSite& callSite, bool enabled)
 {
     if (callSite.kind() != CallSite::Func)
         return;
 
     uint8_t* callerRetAddr = code.segment().base() + callSite.returnAddressOffset();
 
 #if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
     void* callee = X86Encoding::GetRel32Target(callerRetAddr);
@@ -833,18 +834,18 @@ wasm::ToggleProfiling(const Code& code, 
 #else
 # error "Missing architecture"
 #endif
 
     const CodeRange* codeRange = code.lookupRange(callee);
     if (!codeRange->isFunction())
         return;
 
-    uint8_t* from = code.segment().base() + codeRange->funcNonProfilingEntry();
-    uint8_t* to = code.segment().base() + codeRange->funcProfilingEntry();
+    uint8_t* from = code.segment().base() + codeRange->funcNonInstrumentEntry();
+    uint8_t* to = code.segment().base() + codeRange->funcInstrumentEntry();
     if (!enabled)
         Swap(from, to);
 
     MOZ_ASSERT(callee == from);
 
 #if defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_X64)
     X86Encoding::SetRel32(callerRetAddr, to);
 #elif defined(JS_CODEGEN_ARM)
@@ -857,35 +858,35 @@ wasm::ToggleProfiling(const Code& code, 
 #elif defined(JS_CODEGEN_NONE)
     MOZ_CRASH();
 #else
 # error "Missing architecture"
 #endif
 }
 
 void
-wasm::ToggleProfiling(const Code& code, const CallThunk& callThunk, bool enabled)
+wasm::ToggleInstrument(const Code& code, const CallThunk& callThunk, bool enabled)
 {
     const CodeRange& cr = code.metadata().codeRanges[callThunk.u.codeRangeIndex];
-    uint32_t calleeOffset = enabled ? cr.funcProfilingEntry() : cr.funcNonProfilingEntry();
+    uint32_t calleeOffset = enabled ? cr.funcInstrumentEntry() : cr.funcNonInstrumentEntry();
     MacroAssembler::repatchFarJump(code.segment().base(), callThunk.offset, calleeOffset);
 }
 
 void
-wasm::ToggleProfiling(const Code& code, const CodeRange& codeRange, bool enabled)
+wasm::ToggleInstrument(const Code& code, const CodeRange& codeRange, bool enabled)
 {
     if (!codeRange.isFunction())
         return;
 
     uint8_t* codeBase = code.segment().base();
-    uint8_t* profilingEntry     = codeBase + codeRange.funcProfilingEntry();
-    uint8_t* tableProfilingJump = codeBase + codeRange.funcTableProfilingJump();
-    uint8_t* profilingJump      = codeBase + codeRange.funcProfilingJump();
-    uint8_t* profilingEpilogue  = codeBase + codeRange.funcProfilingEpilogue();
+    uint8_t* instrumentEntry     = codeBase + codeRange.funcInstrumentEntry();
+    uint8_t* tableInstrumentJump = codeBase + codeRange.funcTableInstrumentJump();
+    uint8_t* instrumentJump      = codeBase + codeRange.funcInstrumentJump();
+    uint8_t* instrumentEpilogue  = codeBase + codeRange.funcInstrumentEpilogue();
 
     if (enabled) {
-        MacroAssembler::patchNopToNearJump(tableProfilingJump, profilingEntry);
-        MacroAssembler::patchNopToNearJump(profilingJump, profilingEpilogue);
+        MacroAssembler::patchNopToNearJump(tableInstrumentJump, instrumentEntry);
+        MacroAssembler::patchNopToNearJump(instrumentJump, instrumentEpilogue);
     } else {
-        MacroAssembler::patchNearJumpToNop(tableProfilingJump);
-        MacroAssembler::patchNearJumpToNop(profilingJump);
+        MacroAssembler::patchNearJumpToNop(tableInstrumentJump);
+        MacroAssembler::patchNearJumpToNop(instrumentJump);
     }
 }
--- a/js/src/wasm/WasmFrameIterator.h
+++ b/js/src/wasm/WasmFrameIterator.h
@@ -31,17 +31,17 @@ namespace jit { class MacroAssembler; }
 namespace wasm {
 
 class CallSite;
 class Code;
 class CodeRange;
 class SigIdDesc;
 struct CallThunk;
 struct FuncOffsets;
-struct ProfilingOffsets;
+struct InstrumentOffsets;
 struct TrapOffset;
 
 // Iterates over the frames of a single WasmActivation, called synchronously
 // from C++ in the thread of the asm.js.
 //
 // The one exception is that this iterator may be called from the interrupt
 // callback which may be called asynchronously from asm.js code; in this case,
 // the backtrace may not be correct. That being said, we try our best printing
@@ -110,33 +110,33 @@ class ProfilingFrameIterator
     void* stackAddress() const { MOZ_ASSERT(!done()); return stackAddress_; }
     const char* label() const;
 };
 
 // Prologue/epilogue code generation
 
 void
 GenerateExitPrologue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                     ProfilingOffsets* offsets);
+                     InstrumentOffsets* offsets);
 void
 GenerateExitEpilogue(jit::MacroAssembler& masm, unsigned framePushed, ExitReason reason,
-                     ProfilingOffsets* offsets);
+                     InstrumentOffsets* offsets);
 void
 GenerateFunctionPrologue(jit::MacroAssembler& masm, unsigned framePushed, const SigIdDesc& sigId,
                          FuncOffsets* offsets);
 void
 GenerateFunctionEpilogue(jit::MacroAssembler& masm, unsigned framePushed, FuncOffsets* offsets);
 
 // Runtime patching to enable/disable profiling
 
 void
-ToggleProfiling(const Code& code, const CallSite& callSite, bool enabled);
+ToggleInstrument(const Code& code, const CallSite& callSite, bool enabled);
 
 void
-ToggleProfiling(const Code& code, const CallThunk& callThunk, bool enabled);
+ToggleInstrument(const Code& code, const CallThunk& callThunk, bool enabled);
 
 void
-ToggleProfiling(const Code& code, const CodeRange& codeRange, bool enabled);
+ToggleInstrument(const Code& code, const CodeRange& codeRange, bool enabled);
 
 } // namespace wasm
 } // namespace js
 
 #endif // wasm_frame_iterator_h
--- a/js/src/wasm/WasmGenerator.cpp
+++ b/js/src/wasm/WasmGenerator.cpp
@@ -296,17 +296,17 @@ ModuleGenerator::patchCallSites(TrapExit
         MOZ_RELEASE_ASSERT(callerOffset < INT32_MAX);
 
         switch (cs.kind()) {
           case CallSiteDesc::Dynamic:
           case CallSiteDesc::Symbolic:
             break;
           case CallSiteDesc::Func: {
             if (funcIsCompiled(cs.funcIndex())) {
-                uint32_t calleeOffset = funcCodeRange(cs.funcIndex()).funcNonProfilingEntry();
+                uint32_t calleeOffset = funcCodeRange(cs.funcIndex()).funcNonInstrumentEntry();
                 MOZ_RELEASE_ASSERT(calleeOffset < INT32_MAX);
 
                 if (uint32_t(abs(int32_t(calleeOffset) - int32_t(callerOffset))) < JumpRange()) {
                     masm_.patchCall(callerOffset, calleeOffset);
                     break;
                 }
             }
 
@@ -370,17 +370,17 @@ bool
 ModuleGenerator::patchFarJumps(const TrapExitOffsetArray& trapExits)
 {
     MacroAssembler::AutoPrepareForPatching patching(masm_);
 
     for (CallThunk& callThunk : metadata_->callThunks) {
         uint32_t funcIndex = callThunk.u.funcIndex;
         callThunk.u.codeRangeIndex = funcToCodeRange_[funcIndex];
         CodeOffset farJump(callThunk.offset);
-        masm_.patchFarJump(farJump, funcCodeRange(funcIndex).funcNonProfilingEntry());
+        masm_.patchFarJump(farJump, funcCodeRange(funcIndex).funcNonInstrumentEntry());
     }
 
     for (const TrapFarJump& farJump : masm_.trapFarJumps())
         masm_.patchFarJump(farJump.jump, trapExits[farJump.trap].begin);
 
     return true;
 }
 
@@ -465,34 +465,34 @@ ModuleGenerator::finishFuncExports()
         uint32_t codeRangeIndex = funcToCodeRange_[funcIndex];
         metadata_->funcExports.infallibleEmplaceBack(Move(sig), funcIndex, codeRangeIndex);
     }
 
     return true;
 }
 
 typedef Vector<Offsets, 0, SystemAllocPolicy> OffsetVector;
-typedef Vector<ProfilingOffsets, 0, SystemAllocPolicy> ProfilingOffsetVector;
+typedef Vector<InstrumentOffsets, 0, SystemAllocPolicy> InstrumentOffsetVector;
 
 bool
 ModuleGenerator::finishCodegen()
 {
     masm_.haltingAlign(CodeAlignment);
     uint32_t offsetInWhole = masm_.size();
 
     uint32_t numFuncExports = metadata_->funcExports.length();
     MOZ_ASSERT(numFuncExports == exportedFuncs_.count());
 
     // Generate stubs in a separate MacroAssembler since, otherwise, for modules
     // larger than the JumpImmediateRange, even local uses of Label will fail
     // due to the large absolute offsets temporarily stored by Label::bind().
 
     OffsetVector entries;
-    ProfilingOffsetVector interpExits;
-    ProfilingOffsetVector jitExits;
+    InstrumentOffsetVector interpExits;
+    InstrumentOffsetVector jitExits;
     TrapExitOffsetArray trapExits;
     Offsets outOfBoundsExit;
     Offsets unalignedAccessExit;
     Offsets interruptExit;
     Offsets throwStub;
 
     {
         TempAllocator alloc(&lifo_);
--- a/js/src/wasm/WasmGenerator.h
+++ b/js/src/wasm/WasmGenerator.h
@@ -163,17 +163,17 @@ class CompileTask
 // functions, ModuleGenerator::finish() must be called to complete the
 // compilation and extract the resulting wasm module.
 
 class MOZ_STACK_CLASS ModuleGenerator
 {
     typedef HashSet<uint32_t, DefaultHasher<uint32_t>, SystemAllocPolicy> Uint32Set;
     typedef Vector<CompileTask, 0, SystemAllocPolicy> CompileTaskVector;
     typedef Vector<CompileTask*, 0, SystemAllocPolicy> CompileTaskPtrVector;
-    typedef EnumeratedArray<Trap, Trap::Limit, ProfilingOffsets> TrapExitOffsetArray;
+    typedef EnumeratedArray<Trap, Trap::Limit, InstrumentOffsets> TrapExitOffsetArray;
 
     // Constant parameters
     bool                            alwaysBaseline_;
 
     // Data that is moved into the result of finish()
     Assumptions                     assumptions_;
     LinkData                        linkData_;
     MutableMetadata                 metadata_;
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -344,17 +344,17 @@ Instance::Instance(JSContext* cx,
         HandleFunction f = funcImports[i];
         const FuncImport& fi = metadata().funcImports[i];
         FuncImportTls& import = funcImportTls(fi);
         if (!isAsmJS() && IsExportedWasmFunction(f)) {
             WasmInstanceObject* calleeInstanceObj = ExportedFunctionToInstanceObject(f);
             const CodeRange& codeRange = calleeInstanceObj->getExportedFunctionCodeRange(f);
             Instance& calleeInstance = calleeInstanceObj->instance();
             import.tls = &calleeInstance.tlsData_;
-            import.code = calleeInstance.codeSegment().base() + codeRange.funcNonProfilingEntry();
+            import.code = calleeInstance.codeSegment().base() + codeRange.funcNonInstrumentEntry();
             import.baselineScript = nullptr;
             import.obj = calleeInstanceObj;
         } else {
             import.tls = &tlsData_;
             import.code = codeBase() + fi.interpExitCodeOffset();
             import.baselineScript = nullptr;
             import.obj = f;
         }
@@ -776,18 +776,18 @@ Instance::deoptimizeImportExit(uint32_t 
     import.code = codeBase() + fi.interpExitCodeOffset();
     import.baselineScript = nullptr;
 }
 
 static void
 UpdateEntry(const Code& code, bool profilingEnabled, void** entry)
 {
     const CodeRange& codeRange = *code.lookupRange(*entry);
-    void* from = code.segment().base() + codeRange.funcNonProfilingEntry();
-    void* to = code.segment().base() + codeRange.funcProfilingEntry();
+    void* from = code.segment().base() + codeRange.funcNonInstrumentEntry();
+    void* to = code.segment().base() + codeRange.funcInstrumentEntry();
 
     if (!profilingEnabled)
         Swap(from, to);
 
     MOZ_ASSERT(*entry == from);
     *entry = to;
 }
 
--- a/js/src/wasm/WasmModule.cpp
+++ b/js/src/wasm/WasmModule.cpp
@@ -417,21 +417,21 @@ Module::extractCode(JSContext* cx, Mutab
         if (!JS_DefineProperty(cx, segment, "kind", value, JSPROP_ENUMERATE))
             return false;
 
         if (p.isFunction()) {
             value.setNumber((uint32_t)p.funcIndex());
             if (!JS_DefineProperty(cx, segment, "funcIndex", value, JSPROP_ENUMERATE))
                 return false;
 
-            value.setNumber((uint32_t)p.funcNonProfilingEntry());
+            value.setNumber((uint32_t)p.funcNonInstrumentEntry());
             if (!JS_DefineProperty(cx, segment, "funcBodyBegin", value, JSPROP_ENUMERATE))
                 return false;
 
-            value.setNumber((uint32_t)p.funcProfilingEpilogue());
+            value.setNumber((uint32_t)p.funcInstrumentEpilogue());
             if (!JS_DefineProperty(cx, segment, "funcBodyEnd", value, JSPROP_ENUMERATE))
                 return false;
         }
 
         if (!NewbornArrayPush(cx, segments, ObjectValue(*segment)))
             return false;
     }
 
@@ -522,18 +522,18 @@ Module::initSegments(JSContext* cx,
                 WasmInstanceObject* exportInstanceObj = ExportedFunctionToInstanceObject(f);
                 const CodeRange& cr = exportInstanceObj->getExportedFunctionCodeRange(f);
                 Instance& exportInstance = exportInstanceObj->instance();
                 table.set(offset + i, exportInstance.codeBase() + cr.funcTableEntry(), exportInstance);
             } else {
                 const CodeRange& cr = codeRanges[seg.elemCodeRangeIndices[i]];
                 uint32_t entryOffset = table.isTypedFunction()
                                        ? profilingEnabled
-                                         ? cr.funcProfilingEntry()
-                                         : cr.funcNonProfilingEntry()
+                                         ? cr.funcInstrumentEntry()
+                                         : cr.funcNonInstrumentEntry()
                                        : cr.funcTableEntry();
                 table.set(offset + i, codeBase + entryOffset, instance);
             }
         }
     }
 
     if (memoryObj) {
         uint8_t* memoryBase = memoryObj->buffer().dataPointerEither().unwrap(/* memcpy */);
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -489,17 +489,17 @@ wasm::GenerateImportFunction(jit::MacroA
     GenerateFunctionEpilogue(masm, framePushed, &offsets);
     offsets.end = masm.currentOffset();
     return offsets;
 }
 
 // Generate a stub that is called via the internal ABI derived from the
 // signature of the import and calls into an appropriate callImport C++
 // function, having boxed all the ABI arguments into a homogeneous Value array.
-ProfilingOffsets
+InstrumentOffsets
 wasm::GenerateImportInterpExit(MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex,
                                Label* throwLabel)
 {
     masm.setFramePushed(0);
 
     // Argument types for Module::callImport_*:
     static const MIRType typeArray[] = { MIRType::Pointer,   // Instance*
                                          MIRType::Pointer,   // funcImportIndex
@@ -511,17 +511,17 @@ wasm::GenerateImportInterpExit(MacroAsse
     // At the point of the call, the stack layout shall be (sp grows to the left):
     //   | stack args | padding | Value argv[] | padding | retaddr | caller stack args |
     // The padding between stack args and argv ensures that argv is aligned. The
     // padding between argv and retaddr ensures that sp is aligned.
     unsigned argOffset = AlignBytes(StackArgBytes(invokeArgTypes), sizeof(double));
     unsigned argBytes = Max<size_t>(1, fi.sig().args().length()) * sizeof(Value);
     unsigned framePushed = StackDecrementForCall(masm, ABIStackAlignment, argOffset + argBytes);
 
-    ProfilingOffsets offsets;
+    InstrumentOffsets offsets;
     GenerateExitPrologue(masm, framePushed, ExitReason::ImportInterp, &offsets);
 
     // Fill the argument array.
     unsigned offsetToCallerStackArgs = sizeof(Frame) + masm.framePushed();
     Register scratch = ABINonArgReturnReg0;
     FillArgumentArray(masm, fi.sig().args(), argOffset, offsetToCallerStackArgs, scratch, ToValue(false));
 
     // Prepare the arguments for the call to Module::callImport_*.
@@ -622,17 +622,17 @@ wasm::GenerateImportInterpExit(MacroAsse
     return offsets;
 }
 
 static const unsigned SavedTlsReg = sizeof(void*);
 
 // Generate a stub that is called via the internal ABI derived from the
 // signature of the import and calls into a compatible JIT function,
 // having boxed all the ABI arguments into the JIT stack frame layout.
-ProfilingOffsets
+InstrumentOffsets
 wasm::GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi, Label* throwLabel)
 {
     masm.setFramePushed(0);
 
     // JIT calls use the following stack layout (sp grows to the left):
     //   | retaddr | descriptor | callee | argc | this | arg1..N |
     // After the JIT frame, the global register (if present) is saved since the
     // JIT's ABI does not preserve non-volatile regs. Also, unlike most ABIs,
@@ -640,17 +640,17 @@ wasm::GenerateImportJitExit(MacroAssembl
     // the return address.
     static_assert(WasmStackAlignment >= JitStackAlignment, "subsumes");
     unsigned sizeOfRetAddr = sizeof(void*);
     unsigned jitFrameBytes = 3 * sizeof(void*) + (1 + fi.sig().args().length()) * sizeof(Value);
     unsigned totalJitFrameBytes = sizeOfRetAddr + jitFrameBytes + SavedTlsReg;
     unsigned jitFramePushed = StackDecrementForCall(masm, JitStackAlignment, totalJitFrameBytes) -
                               sizeOfRetAddr;
 
-    ProfilingOffsets offsets;
+    InstrumentOffsets offsets;
     GenerateExitPrologue(masm, jitFramePushed, ExitReason::ImportJit, &offsets);
 
     // 1. Descriptor
     size_t argOffset = 0;
     uint32_t descriptor = MakeFrameDescriptor(jitFramePushed, JitFrame_Entry,
                                               JitFrameLayout::Size());
     masm.storePtr(ImmWord(uintptr_t(descriptor)), Address(masm.getStackPointer(), argOffset));
     argOffset += sizeof(size_t);
@@ -861,29 +861,29 @@ wasm::GenerateImportJitExit(MacroAssembl
     offsets.end = masm.currentOffset();
     return offsets;
 }
 
 // Generate a stub that calls into ReportTrap with the right trap reason.
 // This stub is called with ABIStackAlignment by a trap out-of-line path. A
 // profiling prologue/epilogue is used so that stack unwinding picks up the
 // current WasmActivation. Unwinding will begin at the caller of this trap exit.
-ProfilingOffsets
+InstrumentOffsets
 wasm::GenerateTrapExit(MacroAssembler& masm, Trap trap, Label* throwLabel)
 {
     masm.haltingAlign(CodeAlignment);
 
     masm.setFramePushed(0);
 
     MIRTypeVector args;
     MOZ_ALWAYS_TRUE(args.append(MIRType::Int32));
 
     uint32_t framePushed = StackDecrementForCall(masm, ABIStackAlignment, args);
 
-    ProfilingOffsets offsets;
+    InstrumentOffsets offsets;
     GenerateExitPrologue(masm, framePushed, ExitReason::Trap, &offsets);
 
     ABIArgMIRTypeIter i(args);
     if (i->kind() == ABIArg::GPR)
         masm.move32(Imm32(int32_t(trap)), i->gpr());
     else
         masm.store32(Imm32(int32_t(trap)), Address(masm.getStackPointer(), i->offsetFromArgBase()));
     i++;
--- a/js/src/wasm/WasmStubs.h
+++ b/js/src/wasm/WasmStubs.h
@@ -31,24 +31,24 @@ class FuncExport;
 class FuncImport;
 
 extern Offsets
 GenerateEntry(jit::MacroAssembler& masm, const FuncExport& fe);
 
 extern FuncOffsets
 GenerateImportFunction(jit::MacroAssembler& masm, const FuncImport& fi, SigIdDesc sigId);
 
-extern ProfilingOffsets
+extern InstrumentOffsets
 GenerateImportInterpExit(jit::MacroAssembler& masm, const FuncImport& fi, uint32_t funcImportIndex,
                          jit::Label* throwLabel);
 
-extern ProfilingOffsets
+extern InstrumentOffsets
 GenerateImportJitExit(jit::MacroAssembler& masm, const FuncImport& fi, jit::Label* throwLabel);
 
-extern ProfilingOffsets
+extern InstrumentOffsets
 GenerateTrapExit(jit::MacroAssembler& masm, Trap trap, jit::Label* throwLabel);
 
 extern Offsets
 GenerateOutOfBoundsExit(jit::MacroAssembler& masm, jit::Label* throwLabel);
 
 extern Offsets
 GenerateUnalignedExit(jit::MacroAssembler& masm, jit::Label* throwLabel);
 
--- a/js/src/wasm/WasmTypes.h
+++ b/js/src/wasm/WasmTypes.h
@@ -789,17 +789,17 @@ struct SigWithId : Sig
     void operator=(Sig&& rhs) { Sig::operator=(Move(rhs)); }
 
     WASM_DECLARE_SERIALIZABLE(SigWithId)
 };
 
 typedef Vector<SigWithId, 0, SystemAllocPolicy> SigWithIdVector;
 typedef Vector<const SigWithId*, 0, SystemAllocPolicy> SigWithIdPtrVector;
 
-// The (,Profiling,Func)Offsets classes are used to record the offsets of
+// The (,Instrument,Func)Offsets classes are used to record the offsets of
 // different key points in a CodeRange during compilation.
 
 struct Offsets
 {
     explicit Offsets(uint32_t begin = 0, uint32_t end = 0)
       : begin(begin), end(end)
     {}
 
@@ -809,72 +809,74 @@ struct Offsets
     uint32_t end;
 
     void offsetBy(uint32_t offset) {
         begin += offset;
         end += offset;
     }
 };
 
-struct ProfilingOffsets : Offsets
+struct InstrumentOffsets : Offsets
 {
-    MOZ_IMPLICIT ProfilingOffsets(uint32_t profilingReturn = 0)
-      : Offsets(), profilingReturn(profilingReturn)
+    MOZ_IMPLICIT InstrumentOffsets(uint32_t instrumentReturn = 0)
+      : Offsets(), instrumentReturn(instrumentReturn)
     {}
 
-    // For CodeRanges with ProfilingOffsets, 'begin' is the offset of the
-    // profiling entry.
-    uint32_t profilingEntry() const { return begin; }
+    // For CodeRanges with InstrumentOffsets, 'begin' is the offset of the
+    // instrumentation entry.
+    uint32_t instrumentEntry() const { return begin; }
 
-    // The profiling return is the offset of the return instruction, which
+    // The instrumentation return is the offset of the return instruction, which
     // precedes the 'end' by a variable number of instructions due to
     // out-of-line codegen.
-    uint32_t profilingReturn;
+    uint32_t instrumentReturn;
 
     void offsetBy(uint32_t offset) {
         Offsets::offsetBy(offset);
-        profilingReturn += offset;
+        instrumentReturn += offset;
     }
 };
 
-struct FuncOffsets : ProfilingOffsets
+struct FuncOffsets : InstrumentOffsets
 {
     MOZ_IMPLICIT FuncOffsets()
-      : ProfilingOffsets(),
+      : InstrumentOffsets(),
         tableEntry(0),
-        tableProfilingJump(0),
-        nonProfilingEntry(0),
-        profilingJump(0),
-        profilingEpilogue(0)
+        tableInstrumentJump(0),
+        nonInstrumentEntry(0),
+        instrumentJump(0),
+        instrumentEpilogue(0)
     {}
 
     // Function CodeRanges have a table entry which takes an extra signature
     // argument which is checked against the callee's signature before falling
-    // through to the normal prologue. When profiling is enabled, a nop on the
-    // fallthrough is patched to instead jump to the profiling epilogue.
+    // through to the normal prologue. When instrumentation is enabled, a nop on
+    // the fallthrough is patched to instead jump to the instrumentation
+    // prologue.
     uint32_t tableEntry;
-    uint32_t tableProfilingJump;
+    uint32_t tableInstrumentJump;
 
-    // Function CodeRanges have an additional non-profiling entry that comes
-    // after the profiling entry and a non-profiling epilogue that comes before
-    // the profiling epilogue.
-    uint32_t nonProfilingEntry;
+    // Function CodeRanges have an additional non-instrumentation entry that
+    // comes after the instrumentation entry and a non-instrumentation epilogue
+    // that comes before the instrumentation epilogue.
+    uint32_t nonInstrumentEntry;
 
-    // When profiling is enabled, the 'nop' at offset 'profilingJump' is
-    // overwritten to be a jump to 'profilingEpilogue'.
-    uint32_t profilingJump;
-    uint32_t profilingEpilogue;
+    // When instrumentation is enabled, the 'nop' at offset
+    // 'instrumentJump' is overwritten to be a jump to
+    // 'instrumentEpilogue'.
+    uint32_t instrumentJump;
+    uint32_t instrumentEpilogue;
 
     void offsetBy(uint32_t offset) {
-        ProfilingOffsets::offsetBy(offset);
+        InstrumentOffsets::offsetBy(offset);
         tableEntry += offset;
-        tableProfilingJump += offset;
-        nonProfilingEntry += offset;
-        profilingJump += offset;
-        profilingEpilogue += offset;
+        tableInstrumentJump += offset;
+        nonInstrumentEntry += offset;
+        instrumentJump += offset;
+        instrumentEpilogue += offset;
     }
 };
 
 // A wasm::Trap represents a wasm-defined trap that can occur during execution
 // which triggers a WebAssembly.RuntimeError. Generated code may jump to a Trap
 // symbolically, passing the bytecode offset to report as the trap offset. The
 // generated jump will be bound to a tiny stub which fills the offset and
 // then jumps to a per-Trap shared stub at the end of the module.