Bug 1286948 - Adds prolog and epilog debug traps and handlers. r?luke draft
authorYury Delendik <ydelendik@mozilla.com>
Sat, 07 Jan 2017 10:38:44 -0600
changeset 457375 c5ef138fd59f59bc0186c1056b661a27e6a04d6e
parent 457374 1f11073d790aae82c14fcee802ad106a20ea4416
child 457376 88990f91be93e8e6b2ccd8deb2b51d37b350e37d
push id40734
push userydelendik@mozilla.com
push dateSat, 07 Jan 2017 16:43:50 +0000
reviewersluke
bugs1286948
milestone53.0a1
Bug 1286948 - Adds prolog and epilog debug traps and handlers. r?luke Using toggled call/traps to invoke handler to process enter and leave frame events. MozReview-Commit-ID: APTt3N6Zt0P
js/src/jit-test/tests/wasm/profiling.js
js/src/jit/MacroAssembler.h
js/src/jit/arm/MacroAssembler-arm.cpp
js/src/jit/arm64/MacroAssembler-arm64.cpp
js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
js/src/jit/x86-shared/Assembler-x86-shared.h
js/src/jit/x86-shared/BaseAssembler-x86-shared.h
js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
js/src/wasm/WasmBaselineCompile.cpp
js/src/wasm/WasmCode.cpp
js/src/wasm/WasmCode.h
js/src/wasm/WasmDebugFrame.cpp
js/src/wasm/WasmFrameIterator.cpp
js/src/wasm/WasmFrameIterator.h
js/src/wasm/WasmGenerator.cpp
js/src/wasm/WasmGenerator.h
js/src/wasm/WasmInstance.cpp
js/src/wasm/WasmInstance.h
js/src/wasm/WasmStubs.cpp
js/src/wasm/WasmStubs.h
js/src/wasm/WasmTypes.cpp
js/src/wasm/WasmTypes.h
--- a/js/src/jit-test/tests/wasm/profiling.js
+++ b/js/src/jit-test/tests/wasm/profiling.js
@@ -122,32 +122,32 @@ function testError(code, error, expect)
 }
 
 testError(
 `(module
     (func $foo (unreachable))
     (func (export "") (call $foo))
 )`,
 WebAssembly.RuntimeError,
-["", ">", "1,>", "0,1,>", "trap handling,0,1,>", "inline stub,0,1,>", ""]);
+["", ">", "1,>", "0,1,>", "trap handling,0,1,>", "inline stub,0,1,>", "trap handling,0,1,>", "inline stub,0,1,>", ""]);
 
 testError(
 `(module
     (type $good (func))
     (type $bad (func (param i32)))
     (func $foo (call_indirect $bad (i32.const 1) (i32.const 0)))
     (func $bar (type $good))
     (table anyfunc (elem $bar))
     (export "" $foo)
 )`,
 WebAssembly.RuntimeError,
 // Technically we have this one *one-instruction* interval where
 // the caller is lost (the stack with "1,>"). It's annoying to fix and shouldn't
 // mess up profiles in practice so we ignore it.
-["", ">", "0,>", "1,0,>", "1,>", "trap handling,0,>", "inline stub,0,>", ""]);
+["", ">", "0,>", "1,0,>", "1,>", "trap handling,0,>", "inline stub,0,>", "trap handling,0,>", "inline stub,0,>", ""]);
 
 (function() {
     var e = wasmEvalText(`
     (module
         (func $foo (result i32) (i32.const 42))
         (export "foo" $foo)
         (func $bar (result i32) (i32.const 13))
         (table 10 anyfunc)
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -524,16 +524,22 @@ class MacroAssembler : public MacroAssem
     static void repatchFarJump(uint8_t* code, uint32_t farJumpOffset, uint32_t targetOffset) PER_SHARED_ARCH;
 
     // Emit a nop that can be patched to and from a nop and a jump with an int8
     // relative displacement.
     CodeOffset nopPatchableToNearJump() PER_SHARED_ARCH;
     static void patchNopToNearJump(uint8_t* jump, uint8_t* target) PER_SHARED_ARCH;
     static void patchNearJumpToNop(uint8_t* jump) PER_SHARED_ARCH;
 
+    // Emit a nop that can be patched to and from a nop and a call with int32
+    // relative displacement.
+    CodeOffset nopPatchableToCall(const wasm::CallSiteDesc& desc) PER_SHARED_ARCH;
+    static void patchNopToCall(uint8_t* callsite, uint8_t* target) PER_SHARED_ARCH;
+    static void patchCallToNop(uint8_t* callsite) PER_SHARED_ARCH;
+
   public:
     // ===============================================================
     // ABI function calls.
 
     // Setup a call to C/C++ code, given the assumption that the framePushed
     // accruately define the state of the stack, and that the top of the stack
     // was properly aligned. Note that this only supports cdecl.
     void setupAlignedABICall(); // CRASH_ON(arm64)
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -5123,16 +5123,44 @@ MacroAssembler::patchNopToNearJump(uint8
 
 void
 MacroAssembler::patchNearJumpToNop(uint8_t* jump)
 {
     MOZ_ASSERT(reinterpret_cast<Instruction*>(jump)->is<InstBImm>());
     new (jump) InstNOP();
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    CodeOffset offset(currentOffset());
+    ma_nop();
+    append(desc, CodeOffset(currentOffset()), framePushed());
+    return offset;
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* call, uint8_t* target)
+{
+    uint8_t* inst = call - 4;
+    MOZ_ASSERT(reinterpret_cast<Instruction*>(inst)->is<InstBLImm>() ||
+               reinterpret_cast<Instruction*>(inst)->is<InstNOP>());
+
+    new (inst) InstBLImm(BOffImm(target - inst), Assembler::Always);
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* call)
+{
+    uint8_t* inst = call - 4;
+    MOZ_ASSERT(reinterpret_cast<Instruction*>(inst)->is<InstBLImm>() ||
+               reinterpret_cast<Instruction*>(inst)->is<InstNOP>());
+    new (inst) InstNOP();
+}
+
 void
 MacroAssembler::pushReturnAddress()
 {
     push(lr);
 }
 
 void
 MacroAssembler::popReturnAddress()
--- a/js/src/jit/arm64/MacroAssembler-arm64.cpp
+++ b/js/src/jit/arm64/MacroAssembler-arm64.cpp
@@ -582,16 +582,35 @@ MacroAssembler::patchNopToNearJump(uint8
 }
 
 void
 MacroAssembler::patchNearJumpToNop(uint8_t* jump)
 {
     MOZ_CRASH("NYI");
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    MOZ_CRASH("NYI");
+    return CodeOffset();
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* call, uint8_t* target)
+{
+    MOZ_CRASH("NYI");
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* call)
+{
+    MOZ_CRASH("NYI");
+}
+
 void
 MacroAssembler::pushReturnAddress()
 {
     push(lr);
 }
 
 void
 MacroAssembler::popReturnAddress()
--- a/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
+++ b/js/src/jit/mips-shared/MacroAssembler-mips-shared.cpp
@@ -1673,16 +1673,35 @@ void
 MacroAssembler::call(JitCode* c)
 {
     BufferOffset bo = m_buffer.nextOffset();
     addPendingJump(bo, ImmPtr(c->raw()), Relocation::JITCODE);
     ma_liPatchable(ScratchRegister, ImmPtr(c->raw()));
     callJitNoProfiler(ScratchRegister);
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    MOZ_CRASH("NYI");
+    return CodeOffset();
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* call, uint8_t* target)
+{
+    MOZ_CRASH("NYI");
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* call)
+{
+    MOZ_CRASH("NYI");
+}
+
 void
 MacroAssembler::pushReturnAddress()
 {
     push(ra);
 }
 
 void
 MacroAssembler::popReturnAddress()
--- a/js/src/jit/x86-shared/Assembler-x86-shared.h
+++ b/js/src/jit/x86-shared/Assembler-x86-shared.h
@@ -1101,16 +1101,23 @@ class AssemblerX86Shared : public Assemb
     }
     static void patchTwoByteNopToJump(uint8_t* jump, uint8_t* target) {
         X86Encoding::BaseAssembler::patchTwoByteNopToJump(jump, target);
     }
     static void patchJumpToTwoByteNop(uint8_t* jump) {
         X86Encoding::BaseAssembler::patchJumpToTwoByteNop(jump);
     }
 
+    static void patchFiveByteNopToCall(uint8_t* callsite, uint8_t* target) {
+        X86Encoding::BaseAssembler::patchFiveByteNopToCall(callsite, target);
+    }
+    static void patchCallToFiveByteNop(uint8_t* callsite) {
+        X86Encoding::BaseAssembler::patchCallToFiveByteNop(callsite);
+    }
+
     void breakpoint() {
         masm.int3();
     }
 
     static bool HasSSE2() { return CPUInfo::IsSSE2Present(); }
     static bool HasSSE3() { return CPUInfo::IsSSE3Present(); }
     static bool HasSSSE3() { return CPUInfo::IsSSSE3Present(); }
     static bool HasSSE41() { return CPUInfo::IsSSE41Present(); }
--- a/js/src/jit/x86-shared/BaseAssembler-x86-shared.h
+++ b/js/src/jit/x86-shared/BaseAssembler-x86-shared.h
@@ -111,16 +111,50 @@ public:
     static void patchJumpToTwoByteNop(uint8_t* jump)
     {
         // See twoByteNop.
         MOZ_RELEASE_ASSERT(jump[0] == OP_JMP_rel8);
         jump[0] = PRE_OPERAND_SIZE;
         jump[1] = OP_NOP;
     }
 
+    static void patchFiveByteNopToCall(uint8_t* callsite, uint8_t* target)
+    {
+        // Note: |callsite| is the return address, i.e. the address of the
+        // instruction just after the five-byte nop/call being patched.
+        uint8_t* inst = callsite - sizeof(int32_t) - 1;
+        // The nop may already have been patched into a call. A five-byte nop
+        // is 0F 1F 44 00 00; see also nop_five.
+        MOZ_ASSERT(inst[0] == OP_NOP_0F || inst[0] == OP_CALL_rel32);
+        MOZ_ASSERT_IF(inst[0] == OP_NOP_0F, inst[1] == OP_NOP_1F &&
+                                            inst[2] == OP_NOP_44 &&
+                                            inst[3] == OP_NOP_00 &&
+                                            inst[4] == OP_NOP_00);
+        inst[0] = OP_CALL_rel32;
+        SetRel32(callsite, target);
+    }
+
+    static void patchCallToFiveByteNop(uint8_t* callsite)
+    {
+        // See also patchFiveByteNopToCall and nop_five.
+        uint8_t* inst = callsite - sizeof(int32_t) - 1;
+        // The call may already have been patched back into a five-byte nop.
+        if (inst[0] == OP_NOP_0F) {
+            MOZ_ASSERT(inst[1] == OP_NOP_1F && inst[2] == OP_NOP_44 &&
+                       inst[3] == OP_NOP_00 && inst[4] == OP_NOP_00);
+            return;
+        }
+        MOZ_ASSERT(inst[0] == OP_CALL_rel32);
+        inst[0] = OP_NOP_0F;
+        inst[1] = OP_NOP_1F;
+        inst[2] = OP_NOP_44;
+        inst[3] = OP_NOP_00;
+        inst[4] = OP_NOP_00;
+    }
+
     /*
      * The nop multibytes sequences are directly taken from the Intel's
      * architecture software developer manual.
      * They are defined for sequences of sizes from 1 to 9 included.
      */
     void nop_one()
     {
         m_formatter.oneByteOp(OP_NOP);
--- a/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
+++ b/js/src/jit/x86-shared/MacroAssembler-x86-shared.cpp
@@ -736,16 +736,38 @@ MacroAssembler::patchNopToNearJump(uint8
 }
 
 void
 MacroAssembler::patchNearJumpToNop(uint8_t* jump)
 {
     Assembler::patchJumpToTwoByteNop(jump);
 }
 
+CodeOffset
+MacroAssembler::nopPatchableToCall(const wasm::CallSiteDesc& desc)
+{
+    CodeOffset offset(currentOffset());
+    masm.nop_five();
+    append(desc, CodeOffset(currentOffset()), framePushed());
+    MOZ_ASSERT_IF(!oom(), size() - offset.offset() == ToggledCallSize(nullptr));
+    return offset;
+}
+
+void
+MacroAssembler::patchNopToCall(uint8_t* callsite, uint8_t* target)
+{
+    Assembler::patchFiveByteNopToCall(callsite, target);
+}
+
+void
+MacroAssembler::patchCallToNop(uint8_t* callsite)
+{
+    Assembler::patchCallToFiveByteNop(callsite);
+}
+
 // ===============================================================
 // Jit Frames.
 
 uint32_t
 MacroAssembler::pushFakeReturnAddress(Register scratch)
 {
     CodeLabel cl;
 
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -2075,16 +2075,21 @@ class BaseCompiler
         return new (candidate) PooledLabel(this);
     }
 
     void freeLabel(PooledLabel* label) {
         label->~PooledLabel();
         labelPool_.free(label);
     }
 
+    void insertBreakablePoint(CallSiteDesc::Kind kind) {
+        const uint32_t offset = iter_.currentOffset();
+        masm.nopPatchableToCall(CallSiteDesc(offset, kind));
+    }
+
     //////////////////////////////////////////////////////////////////////
     //
     // Function prologue and epilogue.
 
     void beginFunction() {
         JitSpew(JitSpew_Codegen, "# Emitting wasm baseline code");
 
         SigIdDesc sigId = env_.funcSigs[func_.index()]->id;
@@ -2162,16 +2167,68 @@ class BaseCompiler
         // then it's better to store a zero literal, probably.
 
         if (varLow_ < varHigh_) {
             ScratchI32 scratch(*this);
             masm.mov(ImmWord(0), scratch);
             for (int32_t i = varLow_ ; i < varHigh_ ; i += 4)
                 storeToFrameI32(scratch, i + 4);
         }
+
+        if (debugEnabled_)
+            insertBreakablePoint(CallSiteDesc::EnterFrame);
+    }
+
+    void saveResult() {
+        // Spill the return value into DebugFrame::results for the debug trap.
+        MOZ_ASSERT(debugEnabled_);
+        size_t debugFrameOffset = masm.framePushed() - DebugFrame::offsetOfFrame();
+        Address resultsAddress(StackPointer, debugFrameOffset + DebugFrame::offsetOfResults());
+        switch (func_.sig().ret()) {
+          case ExprType::Void:
+            break;
+          case ExprType::I32:
+            masm.store32(RegI32(ReturnReg), resultsAddress);
+            break;
+          case ExprType::I64:
+            masm.store64(RegI64(ReturnReg64), resultsAddress);
+            break;
+          case ExprType::F64:
+            masm.storeDouble(RegF64(ReturnDoubleReg), resultsAddress);
+            break;
+          case ExprType::F32:
+            masm.storeFloat32(RegF32(ReturnFloat32Reg), resultsAddress);
+            break;
+          default:
+            MOZ_CRASH("Function return type");
+        }
+    }
+
+    void restoreResult() {
+        MOZ_ASSERT(debugEnabled_);
+        size_t debugFrameOffset = masm.framePushed() - DebugFrame::offsetOfFrame();
+        Address resultsAddress(StackPointer, debugFrameOffset + DebugFrame::offsetOfResults());
+        switch (func_.sig().ret()) {
+          case ExprType::Void:
+            break;
+          case ExprType::I32:
+            masm.load32(resultsAddress, RegI32(ReturnReg));
+            break;
+          case ExprType::I64:
+            masm.load64(resultsAddress, RegI64(ReturnReg64));
+            break;
+          case ExprType::F64:
+            masm.loadDouble(resultsAddress, RegF64(ReturnDoubleReg));
+            break;
+          case ExprType::F32:
+            masm.loadFloat32(resultsAddress, RegF32(ReturnFloat32Reg));
+            break;
+          default:
+            MOZ_CRASH("Function return type");
+        }
     }
 
     bool endFunction() {
         // Always branch to stackOverflowLabel_ or returnLabel_.
         masm.breakpoint();
 
         // Patch the add in the prologue so that it checks against the correct
         // frame size.
@@ -2183,16 +2240,24 @@ class BaseCompiler
         // distance away from the end of the native stack.
         masm.bind(&stackOverflowLabel_);
         if (localSize_)
             masm.addToStackPtr(Imm32(localSize_));
         masm.jump(TrapDesc(prologueTrapOffset_, Trap::StackOverflow, /* framePushed = */ 0));
 
         masm.bind(&returnLabel_);
 
+        if (debugEnabled_) {
+            // Store and reload the return value via DebugFrame::results so
+            // that the debug trap can observe, and possibly modify, it.
+            saveResult();
+            insertBreakablePoint(CallSiteDesc::LeaveFrame);
+            restoreResult();
+        }
+
         // Restore the TLS register in case it was overwritten by the function.
         loadFromFramePtr(WasmTlsReg, frameOffsetFromSlot(tlsSlot_, MIRType::Pointer));
 
         GenerateFunctionEpilogue(masm, localSize_, &offsets_);
 
 #if defined(JS_ION_PERF)
         // FIXME - profiling code missing.  Bug 1286948.
 
--- a/js/src/wasm/WasmCode.cpp
+++ b/js/src/wasm/WasmCode.cpp
@@ -341,17 +341,18 @@ CodeRange::CodeRange(Kind kind, Offsets 
     funcBeginToTableEntry_(0),
     funcBeginToTableProfilingJump_(0),
     funcBeginToNonProfilingEntry_(0),
     funcProfilingJumpToProfilingReturn_(0),
     funcProfilingEpilogueToProfilingReturn_(0),
     kind_(kind)
 {
     MOZ_ASSERT(begin_ <= end_);
-    MOZ_ASSERT(kind_ == Entry || kind_ == Inline || kind_ == FarJumpIsland);
+    MOZ_ASSERT(kind_ == Entry || kind_ == Inline ||
+               kind_ == FarJumpIsland || kind_ == DebugTrap);
 }
 
 CodeRange::CodeRange(Kind kind, ProfilingOffsets offsets)
   : begin_(offsets.begin),
     profilingReturn_(offsets.profilingReturn),
     end_(offsets.end),
     funcIndex_(0),
     funcLineOrBytecode_(0),
@@ -453,17 +454,17 @@ Metadata::serializedSize() const
            SerializedPodVectorSize(funcNames) +
            SerializedPodVectorSize(customSections) +
            filename.serializedSize();
 }
 
 uint8_t*
 Metadata::serialize(uint8_t* cursor) const
 {
-    MOZ_ASSERT(!debugEnabled);
+    MOZ_ASSERT(!debugEnabled && debugTrapFarJumpOffsets.empty());
     cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
     cursor = SerializeVector(cursor, funcImports);
     cursor = SerializeVector(cursor, funcExports);
     cursor = SerializeVector(cursor, sigIds);
     cursor = SerializePodVector(cursor, globals);
     cursor = SerializePodVector(cursor, tables);
     cursor = SerializePodVector(cursor, memoryAccesses);
     cursor = SerializePodVector(cursor, memoryPatches);
@@ -491,16 +492,17 @@ Metadata::deserialize(const uint8_t* cur
     (cursor = DeserializePodVector(cursor, &boundsChecks)) &&
     (cursor = DeserializePodVector(cursor, &codeRanges)) &&
     (cursor = DeserializePodVector(cursor, &callSites)) &&
     (cursor = DeserializePodVector(cursor, &callThunks)) &&
     (cursor = DeserializePodVector(cursor, &funcNames)) &&
     (cursor = DeserializePodVector(cursor, &customSections)) &&
     (cursor = filename.deserialize(cursor));
     debugEnabled = false;
+    debugTrapFarJumpOffsets.clear();
     return cursor;
 }
 
 size_t
 Metadata::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
 {
     return SizeOfVectorExcludingThis(funcImports, mallocSizeOf) +
            SizeOfVectorExcludingThis(funcExports, mallocSizeOf) +
@@ -568,16 +570,17 @@ Metadata::getFuncName(const Bytes* maybe
 }
 
 Code::Code(UniqueCodeSegment segment,
            const Metadata& metadata,
            const ShareableBytes* maybeBytecode)
   : segment_(Move(segment)),
     metadata_(&metadata),
     maybeBytecode_(maybeBytecode),
+    enterAndLeaveFrameTrapsCounter_(0),
     profilingEnabled_(false)
 {
     MOZ_ASSERT_IF(metadata_->debugEnabled, maybeBytecode);
 }
 
 struct CallSiteRetAddrOffset
 {
     const CallSiteVector& callSites;
@@ -816,16 +819,62 @@ Code::ensureProfilingState(JSRuntime* rt
         for (const CodeRange& codeRange : metadata_->codeRanges)
             ToggleProfiling(*this, codeRange, newProfilingEnabled);
     }
 
     return true;
 }
 
 void
+Code::toggleDebugTrap(uint32_t offset, bool enabled)
+{
+    MOZ_ASSERT(offset);
+    uint8_t* trap = segment_->base() + offset;
+    const Uint32Vector& farJumpOffsets = metadata_->debugTrapFarJumpOffsets;
+    if (enabled) {
+        MOZ_ASSERT(farJumpOffsets.length() > 0);
+        // Lower-bound search for the island nearest the trap (ascending list).
+        size_t i = 0;
+        while (i < farJumpOffsets.length() && farJumpOffsets[i] < offset)
+            i++;
+        if (i >= farJumpOffsets.length() ||
+            (i > 0 && offset - farJumpOffsets[i - 1] < farJumpOffsets[i] - offset))
+            i--;
+        uint8_t* farJump = segment_->base() + farJumpOffsets[i];
+        MacroAssembler::patchNopToCall(trap, farJump);
+    } else {
+        MacroAssembler::patchCallToNop(trap);
+    }
+
+void
+Code::adjustEnterAndLeaveFrameTrapsState(JSContext* cx, bool enabled)
+{
+    MOZ_ASSERT(metadata_->debugEnabled);
+    MOZ_ASSERT_IF(!enabled, enterAndLeaveFrameTrapsCounter_ > 0);
+
+    bool wasEnabled = enterAndLeaveFrameTrapsCounter_ > 0;
+    if (enabled)
+        ++enterAndLeaveFrameTrapsCounter_;
+    else
+        --enterAndLeaveFrameTrapsCounter_;
+    bool stillEnabled = enterAndLeaveFrameTrapsCounter_ > 0;
+    if (wasEnabled == stillEnabled)
+        return;
+
+    AutoWritableJitCode awjc(cx->runtime(), segment_->base(), segment_->codeLength());
+    AutoFlushICache afc("Code::adjustEnterAndLeaveFrameTrapsState");
+    AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->codeLength());
+    for (const CallSite& callSite : metadata_->callSites) {
+        if (callSite.kind() != CallSite::EnterFrame && callSite.kind() != CallSite::LeaveFrame)
+            continue;
+        toggleDebugTrap(callSite.returnAddressOffset(), stillEnabled);
+    }
+}
+
+void
 Code::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                     Metadata::SeenSet* seenMetadata,
                     ShareableBytes::SeenSet* seenBytes,
                     size_t* code,
                     size_t* data) const
 {
     *code += segment_->codeLength();
     *data += mallocSizeOf(this) +
--- a/js/src/wasm/WasmCode.h
+++ b/js/src/wasm/WasmCode.h
@@ -14,26 +14,29 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 #ifndef wasm_code_h
 #define wasm_code_h
 
+#include "js/HashTable.h"
 #include "wasm/WasmTypes.h"
 
 namespace js {
 
 struct AsmJSMetadata;
+class WasmActivation;
 
 namespace wasm {
 
 struct LinkData;
 struct Metadata;
+class FrameIterator;
 
 // A wasm CodeSegment owns the allocated executable code for a wasm module.
 // This allocation also currently includes the global data segment, which allows
 // RIP-relative access to global data on some architectures, but this will
 // change in the future to give global data its own allocation.
 
 class CodeSegment;
 typedef UniquePtr<CodeSegment> UniqueCodeSegment;
@@ -235,16 +238,18 @@ class CodeRange
 {
   public:
     enum Kind {
         Function,          // function definition
         Entry,             // calls into wasm from C++
         ImportJitExit,     // fast-path calling from wasm into JIT code
         ImportInterpExit,  // slow-path calling from wasm into C++ interp
         TrapExit,          // calls C++ to report and jumps to throw stub
+        DebugTrap,         // calls C++ to handle debug event such as
+                           // enter/leave frame or breakpoint
         FarJumpIsland,     // inserted to connect otherwise out-of-range insns
         Inline             // stub that is jumped-to, not called, and thus
                            // replaces/loses preceding innermost frame
     };
 
   private:
     // All fields are treated as cacheable POD:
     uint32_t begin_;
@@ -464,16 +469,17 @@ struct Metadata : ShareableBase<Metadata
     CallSiteVector        callSites;
     CallThunkVector       callThunks;
     NameInBytecodeVector  funcNames;
     CustomSectionVector   customSections;
     CacheableChars        filename;
 
     // Debug-enabled code is not serialized.
     bool                  debugEnabled;
+    Uint32Vector          debugTrapFarJumpOffsets;
 
     bool usesMemory() const { return UsesMemory(memoryUsage); }
     bool hasSharedMemory() const { return memoryUsage == MemoryUsage::Shared; }
 
     const FuncExport& lookupFuncExport(uint32_t funcIndex) const;
 
     // AsmJSMetadata derives Metadata iff isAsmJS(). Mostly this distinction is
     // encapsulated within AsmJS.cpp, but the additional virtual functions allow
@@ -569,18 +575,21 @@ typedef UniquePtr<GeneratedSourceMap> Un
 
 class Code
 {
     const UniqueCodeSegment  segment_;
     const SharedMetadata     metadata_;
     const SharedBytes        maybeBytecode_;
     UniqueGeneratedSourceMap maybeSourceMap_;
     CacheableCharsVector     funcLabels_;
+    uint32_t                 enterAndLeaveFrameTrapsCounter_;
     bool                     profilingEnabled_;
 
+    void toggleDebugTrap(uint32_t offset, bool enabled);
+
   public:
     Code(UniqueCodeSegment segment,
          const Metadata& metadata,
          const ShareableBytes* maybeBytecode);
 
     CodeSegment& segment() { return *segment_; }
     const CodeSegment& segment() const { return *segment_; }
     const Metadata& metadata() const { return *metadata_; }
@@ -609,16 +618,22 @@ class Code
     // the stack. Once in profiling mode, ProfilingFrameIterator can be used to
     // asynchronously walk the stack. Otherwise, the ProfilingFrameIterator will
     // skip any activations of this code.
 
     MOZ_MUST_USE bool ensureProfilingState(JSRuntime* rt, bool enabled);
     bool profilingEnabled() const { return profilingEnabled_; }
     const char* profilingLabel(uint32_t funcIndex) const { return funcLabels_[funcIndex].get(); }
 
+    // The Code can track enter/leave frame events. Any such event triggers a
+    // debug trap. Enter frame events are enabled across all functions, while
+    // leave frame events are enabled only for a particular function.
+
+    void adjustEnterAndLeaveFrameTrapsState(JSContext* cx, bool enabled);
+
     // about:memory reporting:
 
     void addSizeOfMisc(MallocSizeOf mallocSizeOf,
                        Metadata::SeenSet* seenMetadata,
                        ShareableBytes::SeenSet* seenBytes,
                        size_t* code,
                        size_t* data) const;
 
--- a/js/src/wasm/WasmDebugFrame.cpp
+++ b/js/src/wasm/WasmDebugFrame.cpp
@@ -45,21 +45,21 @@ DebugFrame::environmentChain() const
 }
 
 void
 DebugFrame::observeFrame(JSContext* cx)
 {
    if (observing_)
        return;
 
-   // TODO make sure wasm::Code onLeaveFrame traps are on
+   instance()->code().adjustEnterAndLeaveFrameTrapsState(cx, /* enabled = */ true);
    observing_ = true;
 }
 
 void
 DebugFrame::leaveFrame(JSContext* cx)
 {
    if (!observing_)
        return;
 
-   // TODO make sure wasm::Code onLeaveFrame traps are off
+   instance()->code().adjustEnterAndLeaveFrameTrapsState(cx, /* enabled = */ false);
    observing_ = false;
 }
--- a/js/src/wasm/WasmFrameIterator.cpp
+++ b/js/src/wasm/WasmFrameIterator.cpp
@@ -146,16 +146,17 @@ FrameIterator::settle()
         pc_ = nullptr;
         code_ = nullptr;
         codeRange_ = nullptr;
         MOZ_ASSERT(done());
         break;
       case CodeRange::ImportJitExit:
       case CodeRange::ImportInterpExit:
       case CodeRange::TrapExit:
+      case CodeRange::DebugTrap:
       case CodeRange::Inline:
       case CodeRange::FarJumpIsland:
         MOZ_CRASH("Should not encounter an exit during iteration");
     }
 }
 
 const char*
 FrameIterator::filename() const
@@ -235,16 +236,24 @@ DebugFrame*
 FrameIterator::debugFrame() const
 {
     MOZ_ASSERT(!done() && debugEnabled());
     // The fp() points to wasm::Frame.
     void* buf = static_cast<uint8_t*>(fp_ + callsite_->stackDepth()) - DebugFrame::offsetOfFrame();
     return static_cast<DebugFrame*>(buf);
 }
 
+const CallSite*
+FrameIterator::debugTrapCallsite() const
+{
+    MOZ_ASSERT(!done() && debugEnabled());
+    MOZ_ASSERT(callsite_->kind() == CallSite::EnterFrame || callsite_->kind() == CallSite::LeaveFrame);
+    return callsite_;
+}
+
 /*****************************************************************************/
 // Prologue/epilogue code generation
 
 // These constants reflect statically-determined offsets in the profiling
 // prologue/epilogue. The offsets are dynamically asserted during code
 // generation.
 #if defined(JS_CODEGEN_X64)
 # if defined(DEBUG)
@@ -588,16 +597,17 @@ ProfilingFrameIterator::initFromFP()
         fp = CallerFPFromFP(fp);
         callerPC_ = ReturnAddressFromFP(fp);
         callerFP_ = CallerFPFromFP(fp);
         AssertMatchesCallSite(*activation_, callerPC_, callerFP_, fp);
         break;
       case CodeRange::ImportJitExit:
       case CodeRange::ImportInterpExit:
       case CodeRange::TrapExit:
+      case CodeRange::DebugTrap:
       case CodeRange::Inline:
       case CodeRange::FarJumpIsland:
         MOZ_CRASH("Unexpected CodeRange kind");
     }
 
     // The iterator inserts a pretend innermost frame for non-None ExitReasons.
     // This allows the variety of exit reasons to show up in the callstack.
     exitReason_ = activation_->exitReason();
@@ -716,16 +726,17 @@ ProfilingFrameIterator::ProfilingFrameIt
         // The entry trampoline is the final frame in an WasmActivation. The entry
         // trampoline also doesn't GeneratePrologue/Epilogue so we can't use
         // the general unwinding logic above.
         MOZ_ASSERT(!fp);
         callerPC_ = nullptr;
         callerFP_ = nullptr;
         break;
       }
+      case CodeRange::DebugTrap:
       case CodeRange::Inline: {
         // The throw stub clears WasmActivation::fp on it's way out.
         if (!fp) {
             MOZ_ASSERT(done());
             return;
         }
 
         // Most inline code stubs execute after the prologue/epilogue have
@@ -772,16 +783,17 @@ ProfilingFrameIterator::operator++()
       case CodeRange::Entry:
         MOZ_ASSERT(callerFP_ == nullptr);
         callerPC_ = nullptr;
         break;
       case CodeRange::Function:
       case CodeRange::ImportJitExit:
       case CodeRange::ImportInterpExit:
       case CodeRange::TrapExit:
+      case CodeRange::DebugTrap:
       case CodeRange::Inline:
       case CodeRange::FarJumpIsland:
         stackAddress_ = callerFP_;
         callerPC_ = ReturnAddressFromFP(callerFP_);
         AssertMatchesCallSite(*activation_, callerPC_, CallerFPFromFP(callerFP_), callerFP_);
         callerFP_ = CallerFPFromFP(callerFP_);
         break;
     }
@@ -798,36 +810,40 @@ ProfilingFrameIterator::label() const
     // entries will be coalesced by the profiler.
     //
     // NB: these labels are parsed for location by
     //     devtools/client/performance/modules/logic/frame-utils.js
     const char* importJitDescription = "fast FFI trampoline (in asm.js)";
     const char* importInterpDescription = "slow FFI trampoline (in asm.js)";
     const char* nativeDescription = "native call (in asm.js)";
     const char* trapDescription = "trap handling (in asm.js)";
+    const char* debugTrapDescription = "debug trap handling (in asm.js)";
 
     switch (exitReason_) {
       case ExitReason::None:
         break;
       case ExitReason::ImportJit:
         return importJitDescription;
       case ExitReason::ImportInterp:
         return importInterpDescription;
       case ExitReason::Native:
         return nativeDescription;
       case ExitReason::Trap:
         return trapDescription;
+      case ExitReason::DebugTrap:
+        return debugTrapDescription;
     }
 
     switch (codeRange_->kind()) {
       case CodeRange::Function:         return code_->profilingLabel(codeRange_->funcIndex());
       case CodeRange::Entry:            return "entry trampoline (in asm.js)";
       case CodeRange::ImportJitExit:    return importJitDescription;
       case CodeRange::ImportInterpExit: return importInterpDescription;
       case CodeRange::TrapExit:         return trapDescription;
+      case CodeRange::DebugTrap:        return debugTrapDescription;
       case CodeRange::Inline:           return "inline stub (in asm.js)";
       case CodeRange::FarJumpIsland:    return "interstitial (in asm.js)";
     }
 
     MOZ_CRASH("bad code range kind");
 }
 
 /*****************************************************************************/
--- a/js/src/wasm/WasmFrameIterator.h
+++ b/js/src/wasm/WasmFrameIterator.h
@@ -70,27 +70,29 @@ class FrameIterator
     const char16_t* displayURL() const;
     bool mutedErrors() const;
     JSAtom* functionDisplayAtom() const;
     unsigned lineOrBytecode() const;
     const CodeRange* codeRange() const { return codeRange_; }
     Instance* instance() const;
     bool debugEnabled() const;
     DebugFrame* debugFrame() const;
+    const CallSite* debugTrapCallsite() const;
 };
 
 // An ExitReason describes the possible reasons for leaving compiled wasm code
 // or the state of not having left compiled wasm code (ExitReason::None).
 enum class ExitReason : uint32_t
 {
     None,          // default state, the pc is in wasm code
     ImportJit,     // fast-path call directly into JIT code
     ImportInterp,  // slow-path call into C++ Invoke()
     Native,        // call to native C++ code (e.g., Math.sin, ToInt32(), interrupt)
-    Trap           // call to trap handler for the trap in WasmActivation::trap
+    Trap,          // call to trap handler for the trap in WasmActivation::trap
+    DebugTrap      // call to debug trap handler
 };
 
 // Iterates over the frames of a single WasmActivation, given an
 // asynchrously-interrupted thread's state. If the activation's
 // module is not in profiling mode, the activation is skipped.
 class ProfilingFrameIterator
 {
     const WasmActivation* activation_;
--- a/js/src/wasm/WasmGenerator.cpp
+++ b/js/src/wasm/WasmGenerator.cpp
@@ -373,35 +373,60 @@ ModuleGenerator::patchCallSites(TrapExit
                 if (!metadata_->codeRanges.emplaceBack(CodeRange::FarJumpIsland, offsets))
                     return false;
                 existingTrapFarJumps[cs.trap()] = Some(offsets.begin);
             }
 
             masm_.patchCall(callerOffset, *existingTrapFarJumps[cs.trap()]);
             break;
           }
+          case CallSiteDesc::EnterFrame:
+          case CallSiteDesc::LeaveFrame: {
+            Uint32Vector& jumps = metadata_->debugTrapFarJumpOffsets;
+            if (jumps.empty() ||
+                uint32_t(abs(int32_t(jumps.back()) - int32_t(callerOffset))) >= JumpRange())
+            {
+                Offsets offsets;
+                offsets.begin = masm_.currentOffset();
+                uint32_t jumpOffset = masm_.farJumpWithPatch().offset();
+                offsets.end = masm_.currentOffset();
+                if (masm_.oom())
+                    return false;
+
+                if (!metadata_->codeRanges.emplaceBack(CodeRange::FarJumpIsland, offsets))
+                    return false;
+                if (!debugTrapFarJumps_.emplaceBack(jumpOffset))
+                    return false;
+                if (!jumps.emplaceBack(offsets.begin))
+                    return false;
+            }
+            break;
+          }
         }
     }
 
     return true;
 }
 
 bool
-ModuleGenerator::patchFarJumps(const TrapExitOffsetArray& trapExits)
+ModuleGenerator::patchFarJumps(const TrapExitOffsetArray& trapExits, const Offsets& debugTrapStub)
 {
     for (CallThunk& callThunk : metadata_->callThunks) {
         uint32_t funcIndex = callThunk.u.funcIndex;
         callThunk.u.codeRangeIndex = funcToCodeRange_[funcIndex];
         CodeOffset farJump(callThunk.offset);
         masm_.patchFarJump(farJump, funcCodeRange(funcIndex).funcNonProfilingEntry());
     }
 
     for (const TrapFarJump& farJump : masm_.trapFarJumps())
         masm_.patchFarJump(farJump.jump, trapExits[farJump.trap].begin);
 
+    for (uint32_t debugTrapFarJump : debugTrapFarJumps_)
+        masm_.patchFarJump(CodeOffset(debugTrapFarJump), debugTrapStub.begin);
+
     return true;
 }
 
 bool
 ModuleGenerator::finishTask(CompileTask* task)
 {
     masm_.haltingAlign(CodeAlignment);
 
@@ -507,16 +532,17 @@ ModuleGenerator::finishCodegen()
     OffsetVector entries;
     ProfilingOffsetVector interpExits;
     ProfilingOffsetVector jitExits;
     TrapExitOffsetArray trapExits;
     Offsets outOfBoundsExit;
     Offsets unalignedAccessExit;
     Offsets interruptExit;
     Offsets throwStub;
+    Offsets debugTrapStub;
 
     {
         TempAllocator alloc(&lifo_);
         MacroAssembler masm(MacroAssembler::WasmToken(), alloc);
         Label throwLabel;
 
         if (!entries.resize(numFuncExports))
             return false;
@@ -534,16 +560,17 @@ ModuleGenerator::finishCodegen()
 
         for (Trap trap : MakeEnumeratedRange(Trap::Limit))
             trapExits[trap] = GenerateTrapExit(masm, trap, &throwLabel);
 
         outOfBoundsExit = GenerateOutOfBoundsExit(masm, &throwLabel);
         unalignedAccessExit = GenerateUnalignedExit(masm, &throwLabel);
         interruptExit = GenerateInterruptExit(masm, &throwLabel);
         throwStub = GenerateThrowStub(masm, &throwLabel);
+        debugTrapStub = GenerateDebugTrapStub(masm, &throwLabel);
 
         if (masm.oom() || !masm_.asmMergeWith(masm))
             return false;
     }
 
     // Adjust each of the resulting Offsets (to account for being merged into
     // masm_) and then create code ranges for all the stubs.
 
@@ -583,29 +610,33 @@ ModuleGenerator::finishCodegen()
     interruptExit.offsetBy(offsetInWhole);
     if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, interruptExit))
         return false;
 
     throwStub.offsetBy(offsetInWhole);
     if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, throwStub))
         return false;
 
+    debugTrapStub.offsetBy(offsetInWhole);
+    if (!metadata_->codeRanges.emplaceBack(CodeRange::DebugTrap, debugTrapStub))
+        return false;
+
     // Fill in LinkData with the offsets of these stubs.
 
     linkData_.outOfBoundsOffset = outOfBoundsExit.begin;
     linkData_.interruptOffset = interruptExit.begin;
 
     // Now that all other code has been emitted, patch all remaining callsites
     // then far jumps. Patching callsites can generate far jumps so there is an
     // ordering dependency.
 
     if (!patchCallSites(&trapExits))
         return false;
 
-    if (!patchFarJumps(trapExits))
+    if (!patchFarJumps(trapExits, debugTrapStub))
         return false;
 
     // Code-generation is complete!
 
     masm_.finish();
     return !masm_.oom();
 }
 
@@ -1146,16 +1177,17 @@ ModuleGenerator::finish(const ShareableB
     // These Vectors can get large and the excess capacity can be significant,
     // so realloc them down to size.
     metadata_->memoryAccesses.podResizeToFit();
     metadata_->memoryPatches.podResizeToFit();
     metadata_->boundsChecks.podResizeToFit();
     metadata_->codeRanges.podResizeToFit();
     metadata_->callSites.podResizeToFit();
     metadata_->callThunks.podResizeToFit();
+    metadata_->debugTrapFarJumpOffsets.podResizeToFit();
 
     // For asm.js, the tables vector is over-allocated (to avoid resize during
     // parallel copilation). Shrink it back down to fit.
     if (isAsmJS() && !metadata_->tables.resize(numTables_))
         return nullptr;
 
     metadata_->debugEnabled = debugEnabled_;
 
@@ -1163,16 +1195,25 @@ ModuleGenerator::finish(const ShareableB
 #ifdef DEBUG
     uint32_t lastEnd = 0;
     for (const CodeRange& codeRange : metadata_->codeRanges) {
         MOZ_ASSERT(codeRange.begin() >= lastEnd);
         lastEnd = codeRange.end();
     }
 #endif
 
+    // Assert debugTrapFarJumpOffsets are sorted.
+#ifdef DEBUG
+    uint32_t lastOffset = 0;
+    for (uint32_t debugTrapFarJumpOffset : metadata_->debugTrapFarJumpOffsets) {
+        MOZ_ASSERT(debugTrapFarJumpOffset >= lastOffset);
+        lastOffset = debugTrapFarJumpOffset;
+    }
+#endif
+
     if (!finishLinkData(code))
         return nullptr;
 
     return SharedModule(js_new<Module>(Move(assumptions_),
                                        Move(code),
                                        Move(linkData_),
                                        Move(env_->imports),
                                        Move(env_->exports),
--- a/js/src/wasm/WasmGenerator.h
+++ b/js/src/wasm/WasmGenerator.h
@@ -229,16 +229,17 @@ class MOZ_STACK_CLASS ModuleGenerator
     LifoAlloc                       lifo_;
     jit::JitContext                 jcx_;
     jit::TempAllocator              masmAlloc_;
     jit::MacroAssembler             masm_;
     Uint32Vector                    funcToCodeRange_;
     Uint32Set                       exportedFuncs_;
     uint32_t                        lastPatchedCallsite_;
     uint32_t                        startOfUnpatchedCallsites_;
+    Uint32Vector                    debugTrapFarJumps_;
 
     // Parallel compilation
     bool                            parallel_;
     uint32_t                        outstanding_;
     CompileTaskVector               tasks_;
     CompileTaskPtrVector            freeTasks_;
     UniqueFuncBytesVector           freeFuncBytes_;
     CompileTask*                    currentTask_;
@@ -249,17 +250,17 @@ class MOZ_STACK_CLASS ModuleGenerator
     DebugOnly<bool>                 startedFuncDefs_;
     DebugOnly<bool>                 finishedFuncDefs_;
     DebugOnly<uint32_t>             numFinishedFuncDefs_;
 
     bool funcIsCompiled(uint32_t funcIndex) const;
     const CodeRange& funcCodeRange(uint32_t funcIndex) const;
     uint32_t numFuncImports() const;
     MOZ_MUST_USE bool patchCallSites(TrapExitOffsetArray* maybeTrapExits = nullptr);
-    MOZ_MUST_USE bool patchFarJumps(const TrapExitOffsetArray& trapExits);
+    MOZ_MUST_USE bool patchFarJumps(const TrapExitOffsetArray& trapExits, const Offsets& debugTrapStub);
     MOZ_MUST_USE bool finishTask(CompileTask* task);
     MOZ_MUST_USE bool finishOutstandingTask();
     MOZ_MUST_USE bool finishFuncExports();
     MOZ_MUST_USE bool finishCodegen();
     MOZ_MUST_USE bool finishLinkData(Bytes& code);
     MOZ_MUST_USE bool addFuncImport(const Sig& sig, uint32_t globalDataOffset);
     MOZ_MUST_USE bool allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOff);
     MOZ_MUST_USE bool allocateGlobal(GlobalDesc* global);
--- a/js/src/wasm/WasmInstance.cpp
+++ b/js/src/wasm/WasmInstance.cpp
@@ -324,17 +324,18 @@ Instance::Instance(JSContext* cx,
                    HandleWasmMemoryObject memory,
                    SharedTableVector&& tables,
                    Handle<FunctionVector> funcImports,
                    const ValVector& globalImports)
   : compartment_(cx->compartment()),
     object_(object),
     code_(Move(code)),
     memory_(memory),
-    tables_(Move(tables))
+    tables_(Move(tables)),
+    enterFrameTrapsEnabled_(false)
 {
     MOZ_ASSERT(funcImports.length() == metadata().funcImports.length());
     MOZ_ASSERT(tables_.length() == metadata().tables.length());
 
     tlsData_.cx = cx;
     tlsData_.instance = this;
     tlsData_.globalData = code_->segment().globalData();
     tlsData_.memoryBase = memory ? memory->buffer().dataPointerEither().unwrap() : nullptr;
@@ -828,16 +829,26 @@ Instance::ensureProfilingState(JSContext
                 UpdateEntry(*code_, newProfilingEnabled, &array[i]);
         }
     }
 
     return true;
 }
 
 void
+Instance::ensureEnterFrameTrapsState(JSContext* cx, bool enabled)
+{
+    if (enterFrameTrapsEnabled_ == enabled)
+        return;
+
+    code_->adjustEnterAndLeaveFrameTrapsState(cx, enabled);
+    enterFrameTrapsEnabled_ = enabled;
+}
+
+void
 Instance::addSizeOfMisc(MallocSizeOf mallocSizeOf,
                         Metadata::SeenSet* seenMetadata,
                         ShareableBytes::SeenSet* seenBytes,
                         Table::SeenSet* seenTables,
                         size_t* code,
                         size_t* data) const
 {
     *data += mallocSizeOf(this);
--- a/js/src/wasm/WasmInstance.h
+++ b/js/src/wasm/WasmInstance.h
@@ -36,16 +36,17 @@ namespace wasm {
 class Instance
 {
     JSCompartment* const            compartment_;
     ReadBarrieredWasmInstanceObject object_;
     const UniqueCode                code_;
     GCPtrWasmMemoryObject           memory_;
     SharedTableVector               tables_;
     TlsData                         tlsData_;
+    bool                            enterFrameTrapsEnabled_;
 
     // Internal helpers:
     const void** addressOfSigId(const SigIdDesc& sigId) const;
     FuncImportTls& funcImportTls(const FuncImport& fi);
     TableTls& tableTls(const TableDesc& td) const;
 
     // Import call slow paths which are called directly from wasm code.
     friend void* AddressOf(SymbolicAddress, ExclusiveContext*);
@@ -119,16 +120,21 @@ class Instance
 
     void onMovingGrowMemory(uint8_t* prevMemoryBase);
     void onMovingGrowTable();
 
     // See Code::ensureProfilingState comment.
 
     MOZ_MUST_USE bool ensureProfilingState(JSContext* cx, bool enabled);
 
+    // Debug support:
+    bool debugEnabled() const { return code_->metadata().debugEnabled; }
+    bool enterFrameTrapsEnabled() const { return enterFrameTrapsEnabled_; }
+    void ensureEnterFrameTrapsState(JSContext* cx, bool enabled);
+
     // about:memory reporting:
 
     void addSizeOfMisc(MallocSizeOf mallocSizeOf,
                        Metadata::SeenSet* seenMetadata,
                        ShareableBytes::SeenSet* seenBytes,
                        Table::SeenSet* seenTables,
                        size_t* code,
                        size_t* data) const;
--- a/js/src/wasm/WasmStubs.cpp
+++ b/js/src/wasm/WasmStubs.cpp
@@ -941,16 +941,20 @@ wasm::GenerateUnalignedExit(MacroAssembl
 {
     return GenerateGenericMemoryAccessTrap(masm, SymbolicAddress::ReportUnalignedAccess, throwLabel);
 }
 
 static const LiveRegisterSet AllRegsExceptSP(
     GeneralRegisterSet(Registers::AllMask & ~(uint32_t(1) << Registers::StackPointer)),
     FloatRegisterSet(FloatRegisters::AllMask));
 
+static const LiveRegisterSet AllAllocatableRegs = LiveRegisterSet(
+    GeneralRegisterSet(Registers::AllocatableMask),
+    FloatRegisterSet(FloatRegisters::AllMask));
+
 // The async interrupt-callback exit is called from arbitrarily-interrupted wasm
 // code. That means we must first save *all* registers and restore *all*
 // registers (except the stack pointer) when we resume. The address to resume to
 // (assuming that js::HandleExecutionInterrupt doesn't indicate that the
 // execution should be aborted) is stored in WasmActivation::resumePC_.
 // Unfortunately, loading this requires a scratch register which we don't have
 // after restoring all registers. To hack around this, push the resumePC on the
 // stack so that it can be popped directly into PC.
@@ -1122,16 +1126,21 @@ wasm::GenerateThrowStub(MacroAssembler& 
 {
     masm.haltingAlign(CodeAlignment);
 
     masm.bind(throwLabel);
 
     Offsets offsets;
     offsets.begin = masm.currentOffset();
 
+    masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));
+    if (ShadowStackSpace)
+        masm.subFromStackPtr(Imm32(ShadowStackSpace));
+    masm.call(SymbolicAddress::HandleDebugThrow);
+
     // We are about to pop all frames in this WasmActivation. Set fp to null to
     // maintain the invariant that fp is either null or pointing to a valid
     // frame.
     Register scratch = ABINonArgReturnReg0;
     masm.loadWasmActivationFromSymbolicAddress(scratch);
     masm.storePtr(ImmWord(0), Address(scratch, WasmActivation::offsetOfFP()));
 
     masm.setFramePushed(FramePushedForEntrySP);
@@ -1141,8 +1150,55 @@ wasm::GenerateThrowStub(MacroAssembler& 
     MOZ_ASSERT(masm.framePushed() == 0);
 
     masm.mov(ImmWord(0), ReturnReg);
     masm.ret();
 
     offsets.end = masm.currentOffset();
     return offsets;
 }
+
+// Generate a stub that handles togglable enter/leave frame traps or breakpoints.
+// The trap records frame pointer (via GenerateExitPrologue) and saves most of
+// registers to not affect the code generated by WasmBaselineCompile.
+Offsets
+wasm::GenerateDebugTrapStub(MacroAssembler& masm, Label* throwLabel)
+{
+    masm.haltingAlign(CodeAlignment);
+
+    masm.setFramePushed(0);
+
+    ProfilingOffsets offsets;
+    GenerateExitPrologue(masm, 0, ExitReason::DebugTrap, &offsets);
+
+    // Save all registers used between baseline compiler operations.
+    masm.PushRegsInMask(AllAllocatableRegs);
+
+    uint32_t framePushed = masm.framePushed();
+
+    // This method might be called with unaligned stack -- aligning and
+    // saving old stack pointer at the top.
+    Register scratch = ABINonArgReturnReg0;
+    masm.moveStackPtrTo(scratch);
+    masm.subFromStackPtr(Imm32(sizeof(intptr_t)));
+    masm.andToStackPtr(Imm32(~(ABIStackAlignment - 1)));
+    masm.storePtr(scratch, Address(masm.getStackPointer(), 0));
+
+    if (ShadowStackSpace)
+        masm.subFromStackPtr(Imm32(ShadowStackSpace));
+    masm.assertStackAlignment(ABIStackAlignment);
+    masm.call(SymbolicAddress::HandleDebugTrap);
+
+    masm.branchIfFalseBool(ReturnReg, throwLabel);
+
+    if (ShadowStackSpace)
+        masm.addToStackPtr(Imm32(ShadowStackSpace));
+    masm.Pop(scratch);
+    masm.moveToStackPtr(scratch);
+
+    masm.setFramePushed(framePushed);
+    masm.PopRegsInMask(AllAllocatableRegs);
+
+    GenerateExitEpilogue(masm, 0, ExitReason::DebugTrap, &offsets);
+
+    offsets.end = masm.currentOffset();
+    return offsets;
+}
--- a/js/src/wasm/WasmStubs.h
+++ b/js/src/wasm/WasmStubs.h
@@ -53,12 +53,16 @@ extern Offsets
 GenerateUnalignedExit(jit::MacroAssembler& masm, jit::Label* throwLabel);
 
 extern Offsets
 GenerateInterruptExit(jit::MacroAssembler& masm, jit::Label* throwLabel);
 
 extern Offsets
 GenerateThrowStub(jit::MacroAssembler& masm, jit::Label* throwLabel);
 
+extern Offsets
+GenerateDebugTrapStub(jit::MacroAssembler& masm, jit::Label* throwLabel);
+
+
 } // namespace wasm
 } // namespace js
 
 #endif // wasm_stubs_h
--- a/js/src/wasm/WasmTypes.cpp
+++ b/js/src/wasm/WasmTypes.cpp
@@ -93,16 +93,62 @@ WasmHandleExecutionInterrupt()
     // handling an interrupt.  Note that resumePC has already been copied onto
     // the stack by the interrupt stub, so we can clear it before returning
     // to the stub.
     activation->setResumePC(nullptr);
 
     return success;
 }
 
+static bool
+WasmHandleDebugTrap()
+{
+    WasmActivation* activation = JSRuntime::innermostWasmActivation();
+    JSContext* cx = activation->cx();
+
+    FrameIterator iter(*activation);
+    MOZ_ASSERT(iter.debugEnabled());
+    const CallSite* site = iter.debugTrapCallsite();
+    MOZ_ASSERT(site);
+    if (site->kind() == CallSite::EnterFrame) {
+        if (!iter.instance()->enterFrameTrapsEnabled())
+            return true;
+        DebugFrame* frame = iter.debugFrame();
+        frame->setIsDebuggee();
+        frame->observeFrame(cx);
+        // TODO call onEnterFrame
+        return true;
+    }
+    if (site->kind() == CallSite::LeaveFrame) {
+        DebugFrame* frame = iter.debugFrame();
+        // TODO call onLeaveFrame
+        frame->leaveFrame(cx);
+        return true;
+    }
+    // TODO baseline debug traps
+    MOZ_CRASH();
+    return true;
+}
+
+static void
+WasmHandleDebugThrow()
+{
+    WasmActivation* activation = JSRuntime::innermostWasmActivation();
+    JSContext* cx = activation->cx();
+
+    for (FrameIterator iter(*activation); !iter.done(); ++iter) {
+        if (!iter.debugEnabled())
+            continue;
+
+        DebugFrame* frame = iter.debugFrame();
+        // TODO call onExceptionUnwind and onLeaveFrame
+        frame->leaveFrame(cx);
+    }
+}
+
 static void
 WasmReportTrap(int32_t trapIndex)
 {
     JSContext* cx = JSRuntime::innermostWasmActivation()->cx();
 
     MOZ_ASSERT(trapIndex < int32_t(Trap::Limit) && trapIndex >= 0);
     Trap trap = Trap(trapIndex);
 
@@ -272,16 +318,20 @@ wasm::AddressOf(SymbolicAddress imm, Exc
       case SymbolicAddress::Context:
         return cx->contextAddressForJit();
       case SymbolicAddress::InterruptUint32:
         return cx->runtimeAddressOfInterruptUint32();
       case SymbolicAddress::ReportOverRecursed:
         return FuncCast(WasmReportOverRecursed, Args_General0);
       case SymbolicAddress::HandleExecutionInterrupt:
         return FuncCast(WasmHandleExecutionInterrupt, Args_General0);
+      case SymbolicAddress::HandleDebugTrap:
+        return FuncCast(WasmHandleDebugTrap, Args_General0);
+      case SymbolicAddress::HandleDebugThrow:
+        return FuncCast(WasmHandleDebugThrow, Args_General0);
       case SymbolicAddress::ReportTrap:
         return FuncCast(WasmReportTrap, Args_General1);
       case SymbolicAddress::ReportOutOfBounds:
         return FuncCast(WasmReportOutOfBounds, Args_General0);
       case SymbolicAddress::ReportUnalignedAccess:
         return FuncCast(WasmReportUnalignedAccess, Args_General0);
       case SymbolicAddress::CallImport_Void:
         return FuncCast(Instance::callImport_void, Args_General4);
--- a/js/src/wasm/WasmTypes.h
+++ b/js/src/wasm/WasmTypes.h
@@ -885,24 +885,26 @@ struct TrapOffset
 // While the frame-pointer chain allows the stack to be unwound without
 // metadata, Error.stack still needs to know the line/column of every call in
 // the chain. A CallSiteDesc describes a single callsite to which CallSite adds
 // the metadata necessary to walk up to the next frame. Lastly CallSiteAndTarget
 // adds the function index of the callee.
 
 class CallSiteDesc
 {
-    uint32_t lineOrBytecode_ : 30;
-    uint32_t kind_ : 2;
+    uint32_t lineOrBytecode_ : 29;
+    uint32_t kind_ : 3;
   public:
     enum Kind {
         Func,      // pc-relative call to a specific function
         Dynamic,   // dynamic callee called via register
         Symbolic,  // call to a single symbolic callee
-        TrapExit   // call to a trap exit
+        TrapExit,   // call to a trap exit
+        EnterFrame, // call to an enter frame handler
+        LeaveFrame  // call to a leave frame handler
     };
     CallSiteDesc() {}
     explicit CallSiteDesc(Kind kind)
       : lineOrBytecode_(0), kind_(kind)
     {
         MOZ_ASSERT(kind == Kind(kind_));
     }
     CallSiteDesc(uint32_t lineOrBytecode, Kind kind)
@@ -1009,16 +1011,18 @@ enum class SymbolicAddress
     ExpD,
     LogD,
     PowD,
     ATan2D,
     Context,
     InterruptUint32,
     ReportOverRecursed,
     HandleExecutionInterrupt,
+    HandleDebugTrap,
+    HandleDebugThrow,
     ReportTrap,
     ReportOutOfBounds,
     ReportUnalignedAccess,
     CallImport_Void,
     CallImport_I32,
     CallImport_I64,
     CallImport_F64,
     CoerceInPlace_ToInt32,