new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/debug/wasm-10.js
@@ -0,0 +1,77 @@
+// |jit-test| test-also-wasm-baseline
+// Tests that wasm module scripts have inspectable locals.
+
+load(libdir + "wasm.js");
+load(libdir + 'eqArrayHelper.js');
+
+function monitorLocalValues(wast, lib, expected) {
+ function setupFrame(frame) {
+ var locals = {};
+ framesLocals.push(locals);
+ frame.environment.names().forEach(n => {
+ locals[n] = [frame.environment.getVariable(n)];
+ });
+ frame.onStep = function () {
+ frame.environment.names().forEach(n => {
+ var prevValues = locals[n];
+ if (!prevValues)
+ locals[n] = prevValues = [void 0];
+ var value = frame.environment.getVariable(n);
+ if (prevValues[prevValues.length - 1] !== value)
+ prevValues.push(value);
+ });
+ }
+ }
+ var framesLocals = [];
+ wasmRunWithDebugger(wast, lib,
+ function ({dbg}) {
+ dbg.onEnterFrame = function(frame) {
+ if (frame.type == "wasmcall")
+ setupFrame(frame);
+ }
+ },
+ function ({error}) {
+ assertEq(error, undefined);
+ }
+ );
+ assertEq(framesLocals.length, expected.length);
+ for (var i = 0; i < framesLocals.length; i++) {
+ var frameLocals = framesLocals[i];
+ var expectedLocals = expected[i];
+ var localsNames = Object.keys(frameLocals);
+ assertEq(localsNames.length, Object.keys(expectedLocals).length);
+ localsNames.forEach(n => {
+ assertEqArray(frameLocals[n], expectedLocals[n]);
+ });
+ }
+}
+
+monitorLocalValues(
+ '(module (func (nop) (nop)) (export "test" 0))',
+ undefined,
+ [{}]
+);
+monitorLocalValues(
+ '(module (func (export "test") (local i32) (i32.const 1) (set_local 0)))',
+ undefined,
+ [{var0: [0, 1]}]
+);
+monitorLocalValues(
+ '(module (func (export "test") (local f32) (f32.const 1.5) (set_local 0)))',
+ undefined,
+ [{var0: [0, 1.5]}]
+);
+monitorLocalValues(
+ '(module (func (export "test") (local f64) (f64.const 42.25) (set_local 0)))',
+ undefined,
+ [{var0: [0, 42.25]}]
+);
+monitorLocalValues(
+ `(module
+ (func (param i32) (result i32) (get_local 0) (i32.const 2) (i32.add))
+ (func (param i32) (local i32) (get_local 0) (call 0) (set_local 1))
+ (func (export "test") (i32.const 1) (call 1))
+)`.replace(/\n/g, " "),
+ undefined,
+ [{}, {var0: [1], var1: [0, 3]}, {var0: [1]}]
+);
--- a/js/src/vm/EnvironmentObject.cpp
+++ b/js/src/vm/EnvironmentObject.cpp
@@ -1279,18 +1279,20 @@ EnvironmentIter::settle()
env_ = &env_->as<EnvironmentObject>().enclosingEnvironment();
}
incrementScopeIter();
}
}
// Check if we have left the extent of the initial frame after we've
// settled on a static scope.
- if (frame_ && (frame_.isWasmDebugFrame() ||
- (!si_ || si_.scope() == frame_.script()->enclosingScope())))
+ if (frame_ &&
+ (!si_ ||
+ (frame_.hasScript() && si_.scope() == frame_.script()->enclosingScope()) ||
+ (frame_.isWasmDebugFrame() && !si_.scope()->is<WasmFunctionScope>())))
{
frame_ = NullFramePtr();
}
#ifdef DEBUG
if (si_) {
if (hasSyntacticEnvironment()) {
Scope* scope = si_.scope();
@@ -1633,16 +1635,45 @@ class DebugEnvironmentProxyHandler : pub
if (vp.isMagic() && vp.whyMagic() == JS_OPTIMIZED_OUT)
*accessResult = ACCESS_LOST;
else
*accessResult = ACCESS_UNALIASED;
return true;
}
+ if (env->is<WasmFunctionCallObject>()) {
+ if (maybeLiveEnv) {
+ RootedScope scope(cx, getEnvironmentScope(*env));
+ uint32_t index = 0;
+ for (BindingIter bi(scope); bi; bi++) {
+ if (JSID_IS_ATOM(id, bi.name()))
+ break;
+ MOZ_ASSERT(!bi.isLast());
+ index++;
+ }
+
+ AbstractFramePtr frame = maybeLiveEnv->frame();
+ MOZ_ASSERT(frame.isWasmDebugFrame());
+ wasm::DebugFrame* wasmFrame = frame.asWasmDebugFrame();
+ if (action == GET) {
+ if (!wasmFrame->getLocal(index, vp)) {
+ ReportOutOfMemory(cx);
+ return false;
+ }
+ *accessResult = ACCESS_UNALIASED;
+ } else { // if (action == SET)
+ // TODO: implement writing back to wasm locals (SET is currently a no-op).
+ }
+ } else {
+ *accessResult = ACCESS_LOST;
+ }
+ return true;
+ }
+
/* The rest of the internal scopes do not have unaliased vars. */
MOZ_ASSERT(!IsSyntacticEnvironment(env) ||
env->is<WithEnvironmentObject>());
return true;
}
static bool isArguments(JSContext* cx, jsid id)
{
@@ -1667,16 +1698,18 @@ class DebugEnvironmentProxyHandler : pub
static Scope* getEnvironmentScope(const JSObject& env)
{
if (isFunctionEnvironment(env))
return env.as<CallObject>().callee().nonLazyScript()->bodyScope();
if (isNonExtensibleLexicalEnvironment(env))
return &env.as<LexicalEnvironmentObject>().scope();
if (env.is<VarEnvironmentObject>())
return &env.as<VarEnvironmentObject>().scope();
+ if (env.is<WasmFunctionCallObject>())
+ return &env.as<WasmFunctionCallObject>().scope();
return nullptr;
}
/*
* In theory, every non-arrow function scope contains an 'arguments'
* bindings. However, the engine only adds a binding if 'arguments' is
* used in the function body. Thus, from the debugger's perspective,
* 'arguments' may be missing from the list of bindings.
--- a/js/src/vm/EnvironmentObject.h
+++ b/js/src/vm/EnvironmentObject.h
@@ -436,16 +436,21 @@ class WasmFunctionCallObject : public En
public:
static const Class class_;
static const uint32_t RESERVED_SLOTS = 2;
static WasmFunctionCallObject* createHollowForDebug(JSContext* cx,
Handle<WasmFunctionScope*> scope);
+ WasmFunctionScope& scope() const {
+ Value v = getReservedSlot(SCOPE_SLOT);
+ MOZ_ASSERT(v.isPrivateGCThing());
+ return *static_cast<WasmFunctionScope*>(v.toGCThing());
+ }
};
class LexicalEnvironmentObject : public EnvironmentObject
{
// Global and non-syntactic lexical environments need to store a 'this'
// value and all other lexical environments have a fixed shape and store a
// backpointer to the LexicalScope.
//
--- a/js/src/vm/Scope.cpp
+++ b/js/src/vm/Scope.cpp
@@ -8,16 +8,18 @@
#include "mozilla/ScopeExit.h"
#include "jsscript.h"
#include "builtin/ModuleObject.h"
#include "gc/Allocator.h"
#include "vm/EnvironmentObject.h"
#include "vm/Runtime.h"
+#include "vm/StringBuffer.h"
+#include "wasm/WasmInstance.h"
#include "vm/Shape-inl.h"
using namespace js;
using mozilla::Maybe;
using mozilla::MakeScopeExit;
using mozilla::Move;
@@ -1183,34 +1185,58 @@ ModuleScope::script() const
return module()->script();
}
// TODO Check what Debugger behavior should be when it evaluates a
// var declaration.
static const uint32_t WasmFunctionEnvShapeFlags =
BaseShape::NOT_EXTENSIBLE | BaseShape::DELEGATE;
+static JSAtom*
+GenerateWasmVariableName(JSContext* cx, uint32_t index)
+{
+ StringBuffer sb(cx);
+ if (!sb.append("var"))
+ return nullptr;
+ if (!NumberValueToStringBuffer(cx, Int32Value(index), sb))
+ return nullptr;
+
+ return sb.finishAtom();
+}
+
/* static */ WasmFunctionScope*
WasmFunctionScope::create(JSContext* cx, WasmInstanceObject* instance, uint32_t funcIndex)
{
// WasmFunctionScope::Data has GCManagedDeletePolicy because it contains a
// GCPtr. Destruction of |data| below may trigger calls into the GC.
Rooted<WasmFunctionScope*> wasmFunctionScope(cx);
{
+ // TODO: pull the real local variable names from the wasm function definition; for now we synthesize "var<N>" names.
+ wasm::ValTypeVector locals;
+ size_t argsLength;
+ if (!instance->instance().code().debugGetLocalTypes(funcIndex, &locals, &argsLength))
+ return nullptr;
+ uint32_t namesCount = locals.length();
- Rooted<UniquePtr<Data>> data(cx, NewEmptyScopeData<WasmFunctionScope>(cx));
+ Rooted<UniquePtr<Data>> data(cx, NewEmptyScopeData<WasmFunctionScope>(cx, namesCount));
if (!data)
return nullptr;
Rooted<Scope*> enclosingScope(cx, &cx->global()->emptyGlobalScope());
data->instance.init(instance);
data->funcIndex = funcIndex;
+ data->length = namesCount;
+ for (size_t i = 0; i < namesCount; i++) {
+ RootedAtom name(cx, GenerateWasmVariableName(cx, i));
+ if (!name)
+ return nullptr;
+ data->names[i] = BindingName(name, false);
+ }
Scope* scope = Scope::create(cx, ScopeKind::WasmFunction, enclosingScope, /* envShape = */ nullptr);
if (!scope)
return nullptr;
wasmFunctionScope = &scope->as<WasmFunctionScope>();
wasmFunctionScope->initData(Move(data.get()));
}
@@ -1411,20 +1437,20 @@ BindingIter::init(ModuleScope::Data& dat
void
BindingIter::init(WasmFunctionScope::Data& data)
{
// imports - [0, 0)
// positional formals - [0, 0)
// other formals - [0, 0)
// top-level funcs - [0, 0)
- // vars - [0, 0)
- // lets - [0, 0)
- // consts - [0, 0)
- init(0, 0, 0, 0, 0, 0,
+ // vars - [0, data.length)
+ // lets - [data.length, data.length)
+ // consts - [data.length, data.length)
+ init(0, 0, 0, 0, data.length, data.length,
CanHaveFrameSlots | CanHaveEnvironmentSlots,
UINT32_MAX, UINT32_MAX,
data.names, data.length);
}
PositionalFormalParameterIter::PositionalFormalParameterIter(JSScript* script)
: BindingIter(script)
{
--- a/js/src/wasm/WasmBaselineCompile.cpp
+++ b/js/src/wasm/WasmBaselineCompile.cpp
@@ -189,16 +189,134 @@ struct RegTypeOf {
template<> struct RegTypeOf<MIRType::Float32> {
static constexpr RegTypeName value = RegTypeName::Float32;
};
template<> struct RegTypeOf<MIRType::Double> {
static constexpr RegTypeName value = RegTypeName::Float64;
};
+static constexpr int32_t TlsSlotSize = sizeof(void*);
+static constexpr int32_t TlsSlotOffset = TlsSlotSize;
+
+BaseLocalIter::BaseLocalIter(const ValTypeVector& locals,
+ size_t argsLength,
+ bool debugEnabled)
+ : locals_(locals),
+ argsLength_(argsLength),
+ argsRange_(locals.begin(), argsLength),
+ argsIter_(argsRange_),
+ index_(0),
+ localSize_(0),
+ done_(false)
+{
+ MOZ_ASSERT(argsLength <= locals.length());
+
+ // Reserve a stack slot for the TLS pointer outside the locals range so it
+ // isn't zero-filled like the normal locals.
+ DebugOnly<int32_t> tlsSlotOffset = pushLocal(TlsSlotSize);
+ MOZ_ASSERT(tlsSlotOffset == TlsSlotOffset);
+ if (debugEnabled) {
+ // If debug information is generated, construct a DebugFrame record by
+ // reserving space for its data before the TLS pointer. The TLS pointer was
+ // allocated above, and the regular wasm::Frame data starts after the locals.
+ localSize_ += DebugFrame::offsetOfTlsData();
+ MOZ_ASSERT(DebugFrame::offsetOfFrame() == localSize_);
+ }
+ reservedSize_ = localSize_;
+
+ settle();
+}
+
+int32_t
+BaseLocalIter::pushLocal(size_t nbytes)
+{
+ if (nbytes == 8)
+ localSize_ = AlignBytes(localSize_, 8u);
+ else if (nbytes == 16)
+ localSize_ = AlignBytes(localSize_, 16u);
+ localSize_ += nbytes;
+ return localSize_; // Locals grow down so capture base address
+}
+
+void
+BaseLocalIter::settle()
+{
+ if (index_ < argsLength_) {
+ MOZ_ASSERT(!argsIter_.done());
+ mirType_ = argsIter_.mirType();
+ switch (mirType_) {
+ case MIRType::Int32:
+ if (argsIter_->argInRegister())
+ frameOffset_ = pushLocal(4);
+ else
+ frameOffset_ = -(argsIter_->offsetFromArgBase() + sizeof(Frame));
+ break;
+ case MIRType::Int64:
+ if (argsIter_->argInRegister())
+ frameOffset_ = pushLocal(8);
+ else
+ frameOffset_ = -(argsIter_->offsetFromArgBase() + sizeof(Frame));
+ break;
+ case MIRType::Double:
+ if (argsIter_->argInRegister())
+ frameOffset_ = pushLocal(8);
+ else
+ frameOffset_ = -(argsIter_->offsetFromArgBase() + sizeof(Frame));
+ break;
+ case MIRType::Float32:
+ if (argsIter_->argInRegister())
+ frameOffset_ = pushLocal(4);
+ else
+ frameOffset_ = -(argsIter_->offsetFromArgBase() + sizeof(Frame));
+ break;
+ default:
+ MOZ_CRASH("Argument type");
+ }
+ return;
+ }
+
+ MOZ_ASSERT(argsIter_.done());
+ if (index_ < locals_.length()) {
+ switch (locals_[index_]) {
+ case ValType::I32:
+ mirType_ = jit::MIRType::Int32;
+ frameOffset_ = pushLocal(4);
+ break;
+ case ValType::F32:
+ mirType_ = jit::MIRType::Float32;
+ frameOffset_ = pushLocal(4);
+ break;
+ case ValType::F64:
+ mirType_ = jit::MIRType::Double;
+ frameOffset_ = pushLocal(8);
+ break;
+ case ValType::I64:
+ mirType_ = jit::MIRType::Int64;
+ frameOffset_ = pushLocal(8);
+ break;
+ default:
+ MOZ_CRASH("Compiler bug: Unexpected local type");
+ }
+ return;
+ }
+
+ done_ = true;
+}
+
+void
+BaseLocalIter::operator++(int)
+{
+ MOZ_ASSERT(!done_);
+ index_++;
+ if (!argsIter_.done())
+ argsIter_++;
+ settle();
+}
+
class BaseCompiler
{
// We define our own ScratchRegister abstractions, deferring to
// the platform's when possible.
#if defined(JS_CODEGEN_X64) || defined(JS_CODEGEN_X86) || defined(JS_CODEGEN_ARM)
typedef ScratchDoubleScope ScratchF64;
#else
@@ -641,27 +759,16 @@ class BaseCompiler
void loadFromFrameF64(FloatRegister r, int32_t offset) {
masm.loadDouble(Address(StackPointer, localOffsetToSPOffset(offset)), r);
}
void loadFromFrameF32(FloatRegister r, int32_t offset) {
masm.loadFloat32(Address(StackPointer, localOffsetToSPOffset(offset)), r);
}
- // Stack-allocated local slots.
-
- int32_t pushLocal(size_t nbytes) {
- if (nbytes == 8)
- localSize_ = AlignBytes(localSize_, 8u);
- else if (nbytes == 16)
- localSize_ = AlignBytes(localSize_, 16u);
- localSize_ += nbytes;
- return localSize_; // Locals grow down so capture base address
- }
-
int32_t frameOffsetFromSlot(uint32_t slot, MIRType type) {
MOZ_ASSERT(localInfo_[slot].type() == type);
return localInfo_[slot].offs();
}
////////////////////////////////////////////////////////////
//
// Low-level register allocation.
@@ -7417,86 +7524,36 @@ BaseCompiler::init()
// localInfo_ contains an entry for every local in locals_, followed by
// entries for special locals. Currently the only special local is the TLS
// pointer.
tlsSlot_ = locals_.length();
if (!localInfo_.resize(locals_.length() + 1))
return false;
- localSize_ = 0;
-
- // Reserve a stack slot for the TLS pointer outside the varLow..varHigh
- // range so it isn't zero-filled like the normal locals.
- localInfo_[tlsSlot_].init(MIRType::Pointer, pushLocal(sizeof(void*)));
- if (debugEnabled_) {
- // If debug information is generated, constructing DebugFrame record:
- // reserving some data before TLS pointer. The TLS pointer allocated
- // above and regular wasm::Frame data starts after locals.
- localSize_ += DebugFrame::offsetOfTlsData();
- MOZ_ASSERT(DebugFrame::offsetOfFrame() == localSize_);
- }
-
- for (ABIArgIter<const ValTypeVector> i(args); !i.done(); i++) {
+ localInfo_[tlsSlot_].init(MIRType::Pointer, TlsSlotOffset);
+
+ BaseLocalIter i(locals_, args.length(), debugEnabled_);
+ varLow_ = i.reservedSize();
+ for (; !i.done() && i.index() < args.length(); i++) {
+ MOZ_ASSERT(i.isArg());
Local& l = localInfo_[i.index()];
- switch (i.mirType()) {
- case MIRType::Int32:
- if (i->argInRegister())
- l.init(MIRType::Int32, pushLocal(4));
- else
- l.init(MIRType::Int32, -(i->offsetFromArgBase() + sizeof(Frame)));
- break;
- case MIRType::Int64:
- if (i->argInRegister())
- l.init(MIRType::Int64, pushLocal(8));
- else
- l.init(MIRType::Int64, -(i->offsetFromArgBase() + sizeof(Frame)));
- break;
- case MIRType::Double:
- if (i->argInRegister())
- l.init(MIRType::Double, pushLocal(8));
- else
- l.init(MIRType::Double, -(i->offsetFromArgBase() + sizeof(Frame)));
- break;
- case MIRType::Float32:
- if (i->argInRegister())
- l.init(MIRType::Float32, pushLocal(4));
- else
- l.init(MIRType::Float32, -(i->offsetFromArgBase() + sizeof(Frame)));
- break;
- default:
- MOZ_CRASH("Argument type");
- }
- }
-
- varLow_ = localSize_;
-
- for (size_t i = args.length(); i < locals_.length(); i++) {
- Local& l = localInfo_[i];
- switch (locals_[i]) {
- case ValType::I32:
- l.init(MIRType::Int32, pushLocal(4));
- break;
- case ValType::F32:
- l.init(MIRType::Float32, pushLocal(4));
- break;
- case ValType::F64:
- l.init(MIRType::Double, pushLocal(8));
- break;
- case ValType::I64:
- l.init(MIRType::Int64, pushLocal(8));
- break;
- default:
- MOZ_CRASH("Compiler bug: Unexpected local type");
- }
- }
-
- varHigh_ = localSize_;
-
- localSize_ = AlignBytes(localSize_, 16u);
+ l.init(i.mirType(), i.frameOffset());
+ varLow_ = i.currentLocalSize();
+ }
+
+ varHigh_ = varLow_;
+ for (; !i.done() ; i++) {
+ MOZ_ASSERT(!i.isArg());
+ Local& l = localInfo_[i.index()];
+ l.init(i.mirType(), i.frameOffset());
+ varHigh_ = i.currentLocalSize();
+ }
+
+ localSize_ = AlignBytes(varHigh_, 16u);
addInterruptCheck();
return true;
}
FuncOffsets
BaseCompiler::finish()
--- a/js/src/wasm/WasmBaselineCompile.h
+++ b/js/src/wasm/WasmBaselineCompile.h
@@ -14,16 +14,17 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef asmjs_wasm_baseline_compile_h
#define asmjs_wasm_baseline_compile_h
+#include "wasm/WasmGenerator.h"
#include "wasm/WasmTypes.h"
namespace js {
namespace wasm {
class CompileTask;
class FuncCompileUnit;
@@ -31,12 +32,47 @@ class FuncCompileUnit;
// Note: asm.js is also currently not supported due to Atomics and SIMD.
bool
BaselineCanCompile();
// Generate adequate code quickly.
bool
BaselineCompileFunction(CompileTask* task, FuncCompileUnit* unit, UniqueChars* error);
+class BaseLocalIter
+{
+ private:
+ using ConstValTypeRange = mozilla::Range<const ValType>;
+
+ const ValTypeVector& locals_;
+ size_t argsLength_;
+ ConstValTypeRange argsRange_; // range struct cache for ABIArgIter
+ jit::ABIArgIter<ConstValTypeRange> argsIter_;
+ size_t index_;
+ int32_t localSize_;
+ int32_t reservedSize_;
+ int32_t frameOffset_;
+ jit::MIRType mirType_;
+ bool done_;
+
+ void settle();
+ int32_t pushLocal(size_t nbytes);
+
+ public:
+ BaseLocalIter(const ValTypeVector& locals, size_t argsLength, bool debugEnabled);
+ void operator++(int);
+ bool done() const { return done_; }
+
+ jit::MIRType mirType() const { MOZ_ASSERT(!done_); return mirType_; }
+ int32_t frameOffset() const { MOZ_ASSERT(!done_); return frameOffset_; }
+ size_t index() const { MOZ_ASSERT(!done_); return index_; }
+ int32_t currentLocalSize() const { return localSize_; }
+ int32_t reservedSize() const { return reservedSize_; }
+
+#ifdef DEBUG
+ bool isArg() const { MOZ_ASSERT(!done_); return !argsIter_.done(); }
+#endif
+};
+
} // namespace wasm
} // namespace js
#endif // asmjs_wasm_baseline_compile_h
--- a/js/src/wasm/WasmCode.cpp
+++ b/js/src/wasm/WasmCode.cpp
@@ -31,16 +31,17 @@
# include "jit/PerfSpewer.h"
#endif
#include "vm/Debugger.h"
#include "vm/StringBuffer.h"
#include "vtune/VTuneWrapper.h"
#include "wasm/WasmBinaryToText.h"
#include "wasm/WasmModule.h"
#include "wasm/WasmSerialize.h"
+#include "wasm/WasmValidate.h"
#include "jsobjinlines.h"
#include "jit/MacroAssembler-inl.h"
#include "vm/ArrayBufferObject-inl.h"
using namespace js;
using namespace js::jit;
@@ -464,17 +465,18 @@ Metadata::serializedSize() const
SerializedPodVectorSize(funcNames) +
SerializedPodVectorSize(customSections) +
filename.serializedSize();
}
uint8_t*
Metadata::serialize(uint8_t* cursor) const
{
- MOZ_ASSERT(!debugEnabled && debugTrapFarJumpOffsets.empty());
+ MOZ_ASSERT(!debugEnabled && debugTrapFarJumpOffsets.empty() &&
+ debugFuncArgTypes.empty() && debugFuncToCodeRange.empty());
cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
cursor = SerializeVector(cursor, funcImports);
cursor = SerializeVector(cursor, funcExports);
cursor = SerializeVector(cursor, sigIds);
cursor = SerializePodVector(cursor, globals);
cursor = SerializePodVector(cursor, tables);
cursor = SerializePodVector(cursor, memoryAccesses);
cursor = SerializePodVector(cursor, memoryPatches);
@@ -503,16 +505,18 @@ Metadata::deserialize(const uint8_t* cur
(cursor = DeserializePodVector(cursor, &codeRanges)) &&
(cursor = DeserializePodVector(cursor, &callSites)) &&
(cursor = DeserializePodVector(cursor, &callThunks)) &&
(cursor = DeserializePodVector(cursor, &funcNames)) &&
(cursor = DeserializePodVector(cursor, &customSections)) &&
(cursor = filename.deserialize(cursor));
debugEnabled = false;
debugTrapFarJumpOffsets.clear();
+ debugFuncToCodeRange.clear();
+ debugFuncArgTypes.clear();
return cursor;
}
size_t
Metadata::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
return SizeOfVectorExcludingThis(funcImports, mallocSizeOf) +
SizeOfVectorExcludingThis(funcExports, mallocSizeOf) +
@@ -664,26 +668,16 @@ Code::lookupRange(void* pc) const
size_t match;
if (!BinarySearch(metadata_->codeRanges, lowerBound, upperBound, target, &match))
return nullptr;
return &metadata_->codeRanges[match];
}
-const CodeRange*
-Code::lookupRangeByFuncIndexSlow(uint32_t funcIndex) const
-{
- for (const CodeRange& r : metadata_->codeRanges) {
- if (r.kind() == CodeRange::Function && r.funcIndex() == funcIndex)
- return &r;
- }
- return nullptr;
-}
-
struct MemoryAccessOffset
{
const MemoryAccessVector& accesses;
explicit MemoryAccessOffset(const MemoryAccessVector& accesses) : accesses(accesses) {}
uintptr_t operator[](size_t index) const {
return accesses[index].insnOffset();
}
};
@@ -881,18 +875,18 @@ Code::stepModeEnabled(uint32_t funcIndex
{
return stepModeCounters_.initialized() && stepModeCounters_.lookup(funcIndex);
}
bool
Code::incrementStepModeCount(JSContext* cx, uint32_t funcIndex)
{
MOZ_ASSERT(metadata_->debugEnabled);
- const CodeRange* codeRange = lookupRangeByFuncIndexSlow(funcIndex);
- MOZ_ASSERT(codeRange && codeRange->isFunction());
+ const CodeRange& codeRange = metadata_->codeRanges[metadata_->debugFuncToCodeRange[funcIndex]];
+ MOZ_ASSERT(codeRange.isFunction());
if (!stepModeCounters_.initialized() && !stepModeCounters_.init()) {
ReportOutOfMemory(cx);
return false;
}
StepModeCounters::AddPtr p = stepModeCounters_.lookupForAdd(funcIndex);
if (p) {
@@ -900,54 +894,54 @@ Code::incrementStepModeCount(JSContext*
p->value()++;
return true;
}
if (!stepModeCounters_.add(p, funcIndex, 1)) {
ReportOutOfMemory(cx);
return false;
}
- AutoWritableJitCode awjc(cx->runtime(), segment_->base() + codeRange->begin(),
- codeRange->end() - codeRange->begin());
+ AutoWritableJitCode awjc(cx->runtime(), segment_->base() + codeRange.begin(),
+ codeRange.end() - codeRange.begin());
AutoFlushICache afc("Code::incrementStepModeCount");
for (const CallSite& callSite : metadata_->callSites) {
if (callSite.kind() != CallSite::Breakpoint)
continue;
uint32_t offset = callSite.returnAddressOffset();
- if (codeRange->begin() <= offset && offset <= codeRange->end())
+ if (codeRange.begin() <= offset && offset <= codeRange.end())
toggleDebugTrap(offset, true);
}
return true;
}
bool
Code::decrementStepModeCount(JSContext* cx, uint32_t funcIndex)
{
MOZ_ASSERT(metadata_->debugEnabled);
- const CodeRange* codeRange = lookupRangeByFuncIndexSlow(funcIndex);
- MOZ_ASSERT(codeRange && codeRange->isFunction());
+ const CodeRange& codeRange = metadata_->codeRanges[metadata_->debugFuncToCodeRange[funcIndex]];
+ MOZ_ASSERT(codeRange.isFunction());
MOZ_ASSERT(stepModeCounters_.initialized() && !stepModeCounters_.empty());
StepModeCounters::Ptr p = stepModeCounters_.lookup(funcIndex);
MOZ_ASSERT(p);
if (--p->value())
return true;
stepModeCounters_.remove(p);
- AutoWritableJitCode awjc(cx->runtime(), segment_->base() + codeRange->begin(),
- codeRange->end() - codeRange->begin());
+ AutoWritableJitCode awjc(cx->runtime(), segment_->base() + codeRange.begin(),
+ codeRange.end() - codeRange.begin());
AutoFlushICache afc("Code::decrementStepModeCount");
for (const CallSite& callSite : metadata_->callSites) {
if (callSite.kind() != CallSite::Breakpoint)
continue;
uint32_t offset = callSite.returnAddressOffset();
- if (codeRange->begin() <= offset && offset <= codeRange->end()) {
+ if (codeRange.begin() <= offset && offset <= codeRange.end()) {
bool enabled = breakpointSites_.initialized() && breakpointSites_.has(offset);
toggleDebugTrap(offset, enabled);
}
}
return true;
}
static const CallSite*
@@ -1174,16 +1168,36 @@ Code::adjustEnterAndLeaveFrameTrapsState
AutoFlushICache::setRange(uintptr_t(segment_->base()), segment_->length());
for (const CallSite& callSite : metadata_->callSites) {
if (callSite.kind() != CallSite::EnterFrame && callSite.kind() != CallSite::LeaveFrame)
continue;
toggleDebugTrap(callSite.returnAddressOffset(), stillEnabled);
}
}
+bool
+Code::debugGetLocalTypes(uint32_t funcIndex, ValTypeVector* locals, size_t* argsLength)
+{
+ MOZ_ASSERT(metadata_->debugEnabled);
+
+ const ValTypeVector& args = metadata_->debugFuncArgTypes[funcIndex];
+ *argsLength = args.length();
+ if (!locals->appendAll(args))
+ return false;
+
+ // Decode local var types from wasm binary function body.
+ const CodeRange& range = metadata_->codeRanges[metadata_->debugFuncToCodeRange[funcIndex]];
+ // In wasm (non-asm.js), funcLineOrBytecode is the module-relative byte offset of the function body start.
+ MOZ_ASSERT(!metadata_->isAsmJS() && maybeBytecode_);
+ size_t offsetInModule = range.funcLineOrBytecode();
+ Decoder d(maybeBytecode_->begin() + offsetInModule, maybeBytecode_->end(),
+ offsetInModule, /* error = */ nullptr);
+ return DecodeLocalEntries(d, metadata_->kind, locals);
+}
+
void
Code::addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
size_t* code,
size_t* data) const
{
*code += segment_->length();
--- a/js/src/wasm/WasmCode.h
+++ b/js/src/wasm/WasmCode.h
@@ -422,16 +422,17 @@ struct CustomSection
CustomSection() = default;
CustomSection(NameInBytecode name, uint32_t offset, uint32_t length)
: name(name), offset(offset), length(length)
{}
};
typedef Vector<CustomSection, 0, SystemAllocPolicy> CustomSectionVector;
+typedef Vector<ValTypeVector, 0, SystemAllocPolicy> FuncArgTypesVector;
// Metadata holds all the data that is needed to describe compiled wasm code
// at runtime (as opposed to data that is only used to statically link or
// instantiate a module).
//
// Metadata is built incrementally by ModuleGenerator and then shared immutably
// between modules.
@@ -471,16 +472,18 @@ struct Metadata : ShareableBase<Metadata
CallThunkVector callThunks;
NameInBytecodeVector funcNames;
CustomSectionVector customSections;
CacheableChars filename;
// Debug-enabled code is not serialized.
bool debugEnabled;
Uint32Vector debugTrapFarJumpOffsets;
+ Uint32Vector debugFuncToCodeRange;
+ FuncArgTypesVector debugFuncArgTypes;
bool usesMemory() const { return UsesMemory(memoryUsage); }
bool hasSharedMemory() const { return memoryUsage == MemoryUsage::Shared; }
const FuncExport& lookupFuncExport(uint32_t funcIndex) const;
// AsmJSMetadata derives Metadata iff isAsmJS(). Mostly this distinction is
// encapsulated within AsmJS.cpp, but the additional virtual functions allow
@@ -609,17 +612,16 @@ class Code
CodeSegment& segment() { return *segment_; }
const CodeSegment& segment() const { return *segment_; }
const Metadata& metadata() const { return *metadata_; }
// Frame iterator support:
const CallSite* lookupCallSite(void* returnAddress) const;
const CodeRange* lookupRange(void* pc) const;
- const CodeRange* lookupRangeByFuncIndexSlow(uint32_t funcIndex) const;
const MemoryAccess* lookupMemoryAccess(void* pc) const;
// Return the name associated with a given function index, or generate one
// if none was given by the module.
bool getFuncName(uint32_t funcIndex, UTF8Bytes* name) const;
JSAtom* getFuncAtom(JSContext* cx, uint32_t funcIndex) const;
@@ -661,16 +663,20 @@ class Code
// When the Code is debug-enabled, single-stepping mode can be toggled on
// the granularity of individual functions.
bool stepModeEnabled(uint32_t funcIndex) const;
bool incrementStepModeCount(JSContext* cx, uint32_t funcIndex);
bool decrementStepModeCount(JSContext* cx, uint32_t funcIndex);
+ // Stack inspection helpers.
+
+ bool debugGetLocalTypes(uint32_t funcIndex, ValTypeVector* locals, size_t* argsLength);
+
// about:memory reporting:
void addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
size_t* code,
size_t* data) const;
--- a/js/src/wasm/WasmDebugFrame.cpp
+++ b/js/src/wasm/WasmDebugFrame.cpp
@@ -14,16 +14,17 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "wasm/WasmDebugFrame.h"
#include "vm/EnvironmentObject.h"
+#include "wasm/WasmBaselineCompile.h"
#include "wasm/WasmInstance.h"
#include "jsobjinlines.h"
using namespace js;
using namespace js::wasm;
Instance*
@@ -58,8 +59,44 @@ void
DebugFrame::leaveFrame(JSContext* cx)
{
if (!observing_)
return;
instance()->code().adjustEnterAndLeaveFrameTrapsState(cx, /* enabled = */ false);
observing_ = false;
}
+
+bool
+DebugFrame::getLocal(uint32_t localIndex, MutableHandleValue vp)
+{
+ ValTypeVector locals;
+ size_t argsLength;
+ if (!instance()->code().debugGetLocalTypes(funcIndex(), &locals, &argsLength))
+ return false;
+
+ BaseLocalIter iter(locals, argsLength, /* debugEnabled = */ true);
+ while (!iter.done() && iter.index() < localIndex)
+ iter++;
+ MOZ_ALWAYS_TRUE(!iter.done());
+
+ uint8_t* frame = static_cast<uint8_t*>((void*)this) + offsetOfFrame();
+ void* dataPtr = frame - iter.frameOffset();
+ switch (iter.mirType()) {
+ case jit::MIRType::Int32:
+ vp.set(Int32Value(*static_cast<int32_t*>(dataPtr)));
+ break;
+ case jit::MIRType::Int64:
+ // Just display as a Number; it's ok if we lose some precision
+ vp.set(NumberValue((double)*static_cast<int64_t*>(dataPtr)));
+ break;
+ case jit::MIRType::Float32:
+ vp.set(NumberValue(*static_cast<float*>(dataPtr)));
+ break;
+ case jit::MIRType::Double:
+ vp.set(NumberValue(*static_cast<double*>(dataPtr)));
+ break;
+ default:
+ MOZ_CRASH("local type");
+ }
+ return true;
+}
+
--- a/js/src/wasm/WasmDebugFrame.h
+++ b/js/src/wasm/WasmDebugFrame.h
@@ -86,16 +86,18 @@ class DebugFrame
inline void setPrevUpToDate() { prevUpToDate_ = true; }
inline void unsetPrevUpToDate() { prevUpToDate_ = false; }
inline bool hasCachedSavedFrame() const { return hasCachedSavedFrame_; }
inline void setHasCachedSavedFrame() { hasCachedSavedFrame_ = true; }
inline void* resultsPtr() { return &resultI32_; }
+ bool getLocal(uint32_t localIndex, MutableHandleValue vp);
+
static constexpr size_t offsetOfResults() { return offsetof(DebugFrame, resultI32_); }
static constexpr size_t offsetOfFlagsWord() { return offsetof(DebugFrame, reserved1_); }
static constexpr size_t offsetOfFuncIndex() { return offsetof(DebugFrame, funcIndex_); }
static constexpr size_t offsetOfTlsData() { return offsetof(DebugFrame, tlsData_); }
static constexpr size_t offsetOfFrame() { return offsetof(DebugFrame, frame_); }
};
static_assert(DebugFrame::offsetOfResults() == 0, "results shall be at offset 0");
--- a/js/src/wasm/WasmGenerator.cpp
+++ b/js/src/wasm/WasmGenerator.cpp
@@ -196,16 +196,25 @@ ModuleGenerator::initWasm(const CompileA
}
if (env_->startFuncIndex) {
metadata_->startFuncIndex.emplace(*env_->startFuncIndex);
if (!exportedFuncs_.put(*env_->startFuncIndex))
return false;
}
+ if (metadata_->debugEnabled) {
+ if (!debugFuncArgTypes_.resize(env_->funcSigs.length()))
+ return false;
+ for (size_t i = 0; i < debugFuncArgTypes_.length(); i++) {
+ if (!debugFuncArgTypes_[i].appendAll(env_->funcSigs[i]->args()))
+ return false;
+ }
+ }
+
return true;
}
bool
ModuleGenerator::init(UniqueModuleEnvironment env, const CompileArgs& args,
Metadata* maybeAsmJSMetadata)
{
env_ = Move(env);
@@ -1144,25 +1153,31 @@ ModuleGenerator::finish(const ShareableB
metadata_->memoryUsage = env_->memoryUsage;
metadata_->minMemoryLength = env_->minMemoryLength;
metadata_->maxMemoryLength = env_->maxMemoryLength;
metadata_->tables = Move(env_->tables);
metadata_->globals = Move(env_->globals);
metadata_->funcNames = Move(env_->funcNames);
metadata_->customSections = Move(env_->customSections);
+ // Additional debug information to copy.
+ metadata_->debugFuncArgTypes = Move(debugFuncArgTypes_);
+ if (metadata_->debugEnabled)
+ metadata_->debugFuncToCodeRange = Move(funcToCodeRange_);
+
// These Vectors can get large and the excess capacity can be significant,
// so realloc them down to size.
metadata_->memoryAccesses.podResizeToFit();
metadata_->memoryPatches.podResizeToFit();
metadata_->boundsChecks.podResizeToFit();
metadata_->codeRanges.podResizeToFit();
metadata_->callSites.podResizeToFit();
metadata_->callThunks.podResizeToFit();
metadata_->debugTrapFarJumpOffsets.podResizeToFit();
+ metadata_->debugFuncToCodeRange.podResizeToFit();
// For asm.js, the tables vector is over-allocated (to avoid resize during
// parallel copilation). Shrink it back down to fit.
if (isAsmJS() && !metadata_->tables.resize(numTables_))
return nullptr;
// Assert CodeRanges are sorted.
#ifdef DEBUG
--- a/js/src/wasm/WasmGenerator.h
+++ b/js/src/wasm/WasmGenerator.h
@@ -231,16 +231,17 @@ class MOZ_STACK_CLASS ModuleGenerator
jit::JitContext jcx_;
jit::TempAllocator masmAlloc_;
jit::MacroAssembler masm_;
Uint32Vector funcToCodeRange_;
Uint32Set exportedFuncs_;
uint32_t lastPatchedCallsite_;
uint32_t startOfUnpatchedCallsites_;
Uint32Vector debugTrapFarJumps_;
+ FuncArgTypesVector debugFuncArgTypes_;
// Parallel compilation
bool parallel_;
uint32_t outstanding_;
CompileTaskVector tasks_;
CompileTaskPtrVector freeTasks_;
UniqueFuncBytesVector freeFuncBytes_;
CompileTask* currentTask_;