--- a/js/src/frontend/binsource/src/main.rs
+++ b/js/src/frontend/binsource/src/main.rs
@@ -95,31 +95,35 @@ struct GlobalRules {
hpp_tokens_footer: Option<String>,
/// Documentation for the `BinKind` class enum.
hpp_tokens_kind_doc: Option<String>,
/// Documentation for the `BinField` class enum.
hpp_tokens_field_doc: Option<String>,
+ /// Documentation for the `BinVariant` class enum.
+ hpp_tokens_variants_doc: Option<String>,
+
/// Per-node rules.
per_node: HashMap<NodeName, NodeRules>,
}
impl GlobalRules {
fn new(syntax: &Spec, yaml: &yaml_rust::yaml::Yaml) -> Self {
let rules = yaml.as_hash()
.expect("Rules are not a dictionary");
let mut cpp_header = None;
let mut cpp_footer = None;
let mut hpp_class_header = None;
let mut hpp_tokens_header = None;
let mut hpp_tokens_footer = None;
let mut hpp_tokens_kind_doc = None;
let mut hpp_tokens_field_doc = None;
+ let mut hpp_tokens_variants_doc = None;
let mut per_node = HashMap::new();
for (node_key, node_entries) in rules.iter() {
let node_key = node_key.as_str()
.expect("Could not convert node_key to string");
match node_key {
"cpp" => {
@@ -135,16 +139,18 @@ impl GlobalRules {
update_rule(&mut hpp_tokens_header, &node_entries["tokens"]["header"])
.unwrap_or_else(|_| panic!("Rule hpp.tokens.header must be a string"));
update_rule(&mut hpp_tokens_footer, &node_entries["tokens"]["footer"])
.unwrap_or_else(|_| panic!("Rule hpp.tokens.footer must be a string"));
update_rule(&mut hpp_tokens_kind_doc, &node_entries["tokens"]["kind"]["doc"])
.unwrap_or_else(|_| panic!("Rule hpp.tokens.kind.doc must be a string"));
update_rule(&mut hpp_tokens_field_doc, &node_entries["tokens"]["field"]["doc"])
.unwrap_or_else(|_| panic!("Rule hpp.tokens.field.doc must be a string"));
+ update_rule(&mut hpp_tokens_variants_doc, &node_entries["tokens"]["variants"]["doc"])
+ .unwrap_or_else(|_| panic!("Rule hpp.tokens.variants.doc must be a string"));
continue;
}
_ => {}
}
let node_name = syntax.get_node_name(&node_key)
.unwrap_or_else(|| panic!("Unknown node name {}", node_key));
@@ -240,16 +246,17 @@ impl GlobalRules {
Self {
cpp_header,
cpp_footer,
hpp_class_header,
hpp_tokens_header,
hpp_tokens_footer,
hpp_tokens_kind_doc,
hpp_tokens_field_doc,
+ hpp_tokens_variants_doc,
per_node,
}
}
fn get(&self, name: &NodeName) -> NodeRules {
let mut rules = self.per_node.get(name)
.cloned()
.unwrap_or_default();
if let Some(ref parent) = rules.inherits {
@@ -311,16 +318,21 @@ struct CPPExporter {
/// Rules, as specified in yaml.
rules: GlobalRules,
/// All parsers of lists.
list_parsers_to_generate: Vec<ListParserData>,
/// All parsers of options.
option_parsers_to_generate: Vec<OptionParserData>,
+
+ /// A mapping from symbol (e.g. `+`, `-`, `instanceof`, ...) to the
+ /// name of the symbol as part of `enum class BinVariant`
+ /// (e.g. `UnaryOperatorDelete`).
+ variants_by_symbol: HashMap<String, String>,
}
impl CPPExporter {
fn new(syntax: Spec, rules: GlobalRules) -> Self {
let mut list_parsers_to_generate = vec![];
let mut option_parsers_to_generate = vec![];
for (parser_node_name, typedef) in syntax.typedefs_by_name() {
if typedef.is_optional() {
@@ -332,35 +344,63 @@ impl CPPExporter {
parser_node_name,
content_name,
content_node_name);
option_parsers_to_generate.push(OptionParserData {
name: parser_node_name.clone(),
elements: content_node_name
});
} else if let TypeSpec::Array { ref contents, ref supports_empty } = *typedef.spec() {
- let content_name = TypeName::type_(&**contents); // FIXME: Wait, do we have an implementation of type names in two places?
+ let content_name = TypeName::type_(&**contents);
let content_node_name = syntax.get_node_name(&content_name)
.unwrap_or_else(|| panic!("While generating an array parser, could not find node name {}", content_name))
.clone();
list_parsers_to_generate.push(ListParserData {
name: parser_node_name.clone(),
supports_empty: *supports_empty,
elements: content_node_name
});
}
}
list_parsers_to_generate.sort_by(|a, b| str::cmp(a.name.to_str(), b.name.to_str()));
option_parsers_to_generate.sort_by(|a, b| str::cmp(a.name.to_str(), b.name.to_str()));
+ // Prepare variants_by_symbol, which will let us look up the BinVariant name of
+ // a symbol. Since some symbols can appear in several enums (e.g. "+"
+ // is both a unary and a binary operator), we need to collect all the
+ // string enums that contain each symbol and come up with a unique name
+ // (note that there is no guarantee of uniqueness – if collisions show up,
+ // we may need to tweak the name generation algorithm).
+ let mut enum_by_string : HashMap<String, Vec<NodeName>> = HashMap::new();
+ for (name, enum_) in syntax.string_enums_by_name().iter() {
+ for string in enum_.strings().iter() {
+ let vec = enum_by_string.entry(string.clone())
+ .or_insert_with(|| vec![]);
+ vec.push(name.clone());
+ }
+ }
+ let variants_by_symbol = enum_by_string.drain()
+ .map(|(string, names)| {
+ let expanded = format!("{names}{symbol}",
+ names = names.iter()
+ .map(NodeName::to_str)
+ .sorted()
+ .into_iter()
+ .format("Or"),
+ symbol = string.to_cpp_enum_case());
+ (string, expanded)
+ })
+ .collect();
+
CPPExporter {
syntax,
rules,
list_parsers_to_generate,
option_parsers_to_generate,
+ variants_by_symbol,
}
}
// ----- Generating the header
/// Get the type representing a success for parsing this node.
fn get_type_ok(&self, name: &NodeName, default: &str) -> String {
let rules_for_this_interface = self.rules.get(name);
@@ -380,17 +420,17 @@ impl CPPExporter {
kind = kind,
args = args,
)
}
fn get_method_definition_start(&self, name: &NodeName, default_type_ok: &str, prefix: &str, args: &str) -> String {
let type_ok = self.get_type_ok(name, default_type_ok);
let kind = name.to_class_cases();
- format!("JS::Result<{type_ok}>\nBinASTParser::parse{prefix}{kind}({args})",
+ format!("template<typename Tok> JS::Result<{type_ok}>\nBinASTParser<Tok>::parse{prefix}{kind}({args})",
prefix = prefix,
type_ok = type_ok,
kind = kind,
args = args,
)
}
@@ -401,52 +441,80 @@ impl CPPExporter {
buffer.push_str("\n\n");
if self.rules.hpp_tokens_kind_doc.is_some() {
buffer.push_str(&self.rules.hpp_tokens_kind_doc.reindent(""));
}
let node_names = self.syntax.node_names()
.keys()
.sorted();
- buffer.push_str(&format!("\n#define FOR_EACH_BIN_KIND(F) \\\n{nodes}\n\n",
+ buffer.push_str(&format!("\n#define FOR_EACH_BIN_KIND(F) \\\n{nodes}\n",
nodes = node_names.iter()
- .map(|name| format!(" F({name}, {name})",
- name = name))
+ .map(|name| format!(" F({enum_name}, \"{spec_name}\")",
+ enum_name = name.to_cpp_enum_case(),
+ spec_name = name))
.format(" \\\n")));
buffer.push_str("
enum class BinKind {
#define EMIT_ENUM(name, _) name,
FOR_EACH_BIN_KIND(EMIT_ENUM)
#undef EMIT_ENUM
};
");
- buffer.push_str(&format!("\n// The number of distinct values of BinKind.\nconst size_t BINKIND_LIMIT = {};\n", self.syntax.node_names().len()));
+ buffer.push_str(&format!("\n// The number of distinct values of BinKind.\nconst size_t BINKIND_LIMIT = {};\n\n\n", self.syntax.node_names().len()));
buffer.push_str("\n\n");
if self.rules.hpp_tokens_field_doc.is_some() {
buffer.push_str(&self.rules.hpp_tokens_field_doc.reindent(""));
}
let field_names = self.syntax.field_names()
.keys()
.sorted();
- buffer.push_str(&format!("\n#define FOR_EACH_BIN_FIELD(F) \\\n{nodes}\n\n",
+ buffer.push_str(&format!("\n#define FOR_EACH_BIN_FIELD(F) \\\n{nodes}\n",
nodes = field_names.iter()
- .map(|name| format!(" F({enum_name}, {spec_name})",
+ .map(|name| format!(" F({enum_name}, \"{spec_name}\")",
spec_name = name,
enum_name = name.to_cpp_enum_case()))
.format(" \\\n")));
buffer.push_str("
enum class BinField {
#define EMIT_ENUM(name, _) name,
FOR_EACH_BIN_FIELD(EMIT_ENUM)
#undef EMIT_ENUM
};
");
- buffer.push_str(&format!("\n// The number of distinct values of BinField.\nconst size_t BINFIELD_LIMIT = {};\n", self.syntax.field_names().len()));
+ buffer.push_str(&format!("\n// The number of distinct values of BinField.\nconst size_t BINFIELD_LIMIT = {};\n\n\n", self.syntax.field_names().len()));
+
+ if self.rules.hpp_tokens_variants_doc.is_some() {
+ buffer.push_str(&self.rules.hpp_tokens_variants_doc.reindent(""));
+ }
+ let enum_variants : Vec<_> = self.variants_by_symbol
+ .iter()
+ .sorted_by(|&(ref symbol_1, ref name_1), &(ref symbol_2, ref name_2)| {
+ Ord::cmp(name_1, name_2)
+ .then_with(|| Ord::cmp(symbol_1, symbol_2))
+ });
+
+ buffer.push_str(&format!("\n#define FOR_EACH_BIN_VARIANT(F) \\\n{nodes}\n",
+ nodes = enum_variants.into_iter()
+ .map(|(symbol, name)| format!(" F({variant_name}, \"{spec_name}\")",
+ spec_name = symbol,
+ variant_name = name))
+ .format(" \\\n")));
+
+ buffer.push_str("
+enum class BinVariant {
+#define EMIT_ENUM(name, _) name,
+ FOR_EACH_BIN_VARIANT(EMIT_ENUM)
+#undef EMIT_ENUM
+};
+");
+ buffer.push_str(&format!("\n// The number of distinct values of BinVariant.\nconst size_t BINVARIANT_LIMIT = {};\n\n\n",
+ self.variants_by_symbol.len()));
buffer.push_str(&self.rules.hpp_tokens_footer.reindent(""));
buffer.push_str("\n");
}
/// Declare string enums
fn export_declare_string_enums_classes(&self, buffer: &mut String) {
buffer.push_str("\n\n// ----- Declaring string enums (by lexicographical order)\n");
@@ -515,17 +583,17 @@ enum class BinField {
fn export_declare_string_enums_methods(&self, buffer: &mut String) {
buffer.push_str("\n\n// ----- String enums (by lexicographical order)\n");
buffer.push_str("// Implementations are autogenerated\n");
let string_enums_by_name = self.syntax.string_enums_by_name()
.iter()
.sorted_by(|a, b| str::cmp(a.0.to_str(), b.0.to_str()));
for (kind, _) in string_enums_by_name {
- let type_ok = format!("BinASTParser::{kind}", kind = kind.to_class_cases());
+ let type_ok = format!("typename BinASTParser<Tok>::{kind}", kind = kind.to_class_cases());
let rendered = self.get_method_signature(kind, &type_ok, "", "");
buffer.push_str(&rendered.reindent(""));
buffer.push_str("\n");
}
}
fn export_declare_list_methods(&self, buffer: &mut String) {
buffer.push_str("\n\n// ----- Lists (by lexicographical order)\n");
@@ -601,36 +669,37 @@ impl CPPExporter {
buffer.push_str(&format!("{bnf}
{first_line}
{{
BinKind kind;
BinFields fields(cx_);
AutoTaggedTuple guard(*tokenizer_);
const auto start = tokenizer_->offset();
- TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
+ MOZ_TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
- MOZ_TRY_DECL(result, parseSum{kind}(start, kind, fields));
+ BINJS_MOZ_TRY_DECL(result, parseSum{kind}(start, kind, fields));
- TRY(guard.done());
+ MOZ_TRY(guard.done());
return result;
}}\n",
bnf = rendered_bnf,
kind = kind,
first_line = self.get_method_definition_start(name, "ParseNode*", "", "")
));
// Generate inner method
let mut buffer_cases = String::new();
for node in nodes {
buffer_cases.push_str(&format!("
- case BinKind::{kind}:
- MOZ_TRY_VAR(result, parseInterface{kind}(start, kind, fields));
+ case BinKind::{variant_name}:
+ MOZ_TRY_VAR(result, parseInterface{class_name}(start, kind, fields));
break;",
- kind = node.to_class_cases()));
+ class_name = node.to_class_cases(),
+ variant_name = node.to_cpp_enum_case()));
}
buffer.push_str(&format!("\n{first_line}
{{
{type_ok} result;
switch(kind) {{{cases}
default:
return raiseInvalidKind(\"{kind}\", kind);
}}
@@ -686,25 +755,25 @@ impl CPPExporter {
let rendered = format!("\n{first_line}
{{
uint32_t length;
AutoList guard(*tokenizer_);
const auto start = tokenizer_->offset();
- TRY(tokenizer_->enterList(length, guard));{empty_check}
+ MOZ_TRY(tokenizer_->enterList(length, guard));{empty_check}
{init}
for (uint32_t i = 0; i < length; ++i) {{
- MOZ_TRY_DECL(item, parse{inner}());
+ BINJS_MOZ_TRY_DECL(item, parse{inner}());
{append}
}}
- TRY(guard.done());
+ MOZ_TRY(guard.done());
return result;
}}\n",
first_line = first_line,
empty_check =
if parser.supports_empty {
"".to_string()
} else {
format!("\n if (length == 0)\n return raiseEmpty(\"{kind}\");\n",
@@ -761,89 +830,94 @@ impl CPPExporter {
match named_implementation {
NamedType::Interface(_) => {
buffer.push_str(&format!("{first_line}
{{
BinKind kind;
BinFields fields(cx_);
AutoTaggedTuple guard(*tokenizer_);
- TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
+ MOZ_TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
{type_ok} result;
if (kind == BinKind::{null}) {{
result = {default_value};
}} else {{
const auto start = tokenizer_->offset();
MOZ_TRY_VAR(result, parseInterface{contents}(start, kind, fields));
}}
- TRY(guard.done());
+ MOZ_TRY(guard.done());
return result;
}}
",
first_line = self.get_method_definition_start(&parser.name, "ParseNode*", "", ""),
- null = self.syntax.get_null_name().to_str(),
+ null = self.syntax.get_null_name().to_cpp_enum_case(),
contents = parser.elements.to_class_cases(),
type_ok = type_ok,
default_value = default_value,
));
}
NamedType::Typedef(ref type_) => {
match type_.spec() {
&TypeSpec::TypeSum(_) => {
buffer.push_str(&format!("{first_line}
{{
BinKind kind;
BinFields fields(cx_);
AutoTaggedTuple guard(*tokenizer_);
- TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
+ MOZ_TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
{type_ok} result;
- if (kind == BinKind::_Null) {{
+ if (kind == BinKind::{null}) {{
result = {default_value};
}} else {{
const auto start = tokenizer_->offset();
MOZ_TRY_VAR(result, parseSum{contents}(start, kind, fields));
}}
- TRY(guard.done());
+ MOZ_TRY(guard.done());
return result;
}}
",
first_line = self.get_method_definition_start(&parser.name, "ParseNode*", "", ""),
contents = parser.elements.to_class_cases(),
type_ok = type_ok,
default_value = default_value,
+ null = self.syntax.get_null_name().to_cpp_enum_case(),
));
}
&TypeSpec::String => {
let build_result = rules_for_this_node.init.reindent(" ");
-
- buffer.push_str(&format!("{first_line}
+ let first_line = self.get_method_definition_start(&parser.name, "ParseNode*", "", "");
+ if build_result.len() == 0 {
+ buffer.push_str(&format!("{first_line}
{{
- RootedAtom string(cx_);
- MOZ_TRY(readMaybeString(&string));
+ return raiseError(\"FIXME: Not implemented yet ({kind})\");
+}}
+
+",
+ first_line = first_line,
+ kind = parser.name.to_str()));
+ } else {
+ buffer.push_str(&format!("{first_line}
+{{
+ BINJS_MOZ_TRY_DECL(result, tokenizer_->readMaybeAtom());
{build}
- {return_}
+ return result;
}}
",
- first_line = self.get_method_definition_start(&parser.name, "ParseNode*", "", ""),
- build = build_result,
- return_ = if build_result.len() == 0 {
- format!("return raiseError(\"FIXME: Not implemented yet ({kind})\");\n",
- kind = parser.name.to_str())
- } else {
- "return result;".to_string()
- }
- ));
+ first_line = first_line,
+ build = build_result,
+ ));
+ }
}
_else => unimplemented!("{:?}", _else)
}
}
NamedType::StringEnum(_) => {
unimplemented!()
}
}
@@ -867,21 +941,21 @@ impl CPPExporter {
// Generate public method
let kind = name.to_class_cases();
buffer.push_str(&format!("{first_line}
{{
BinKind kind;
BinFields fields(cx_);
AutoTaggedTuple guard(*tokenizer_);
- TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
+ MOZ_TRY(tokenizer_->enterTaggedTuple(kind, fields, guard));
const auto start = tokenizer_->offset();
- MOZ_TRY_DECL(result, parseInterface{kind}(start, kind, fields));
- TRY(guard.done());
+ BINJS_MOZ_TRY_DECL(result, parseInterface{kind}(start, kind, fields));
+ MOZ_TRY(guard.done());
return result;
}}
",
first_line = self.get_method_definition_start(name, "ParseNode*", "", ""),
kind = kind
));
@@ -903,39 +977,48 @@ impl CPPExporter {
.unwrap_or_default();
let needs_block = rules_for_this_field.block_before_field.is_some() || rules_for_this_field.block_after_field.is_some();
let var_name = field.name().to_cpp_field_case();
let (decl_var, parse_var) = match field.type_().get_primitive(&self.syntax) {
Some(IsNullable { is_nullable: false, content: Primitive::Number }) => {
if needs_block {
(Some(format!("double {var_name};", var_name = var_name)),
- Some(format!("MOZ_TRY_VAR({var_name}, readNumber());", var_name = var_name)))
+ Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readDouble());", var_name = var_name)))
} else {
(None,
- Some(format!("MOZ_TRY_DECL({var_name}, readNumber());", var_name = var_name)))
+ Some(format!("BINJS_MOZ_TRY_DECL({var_name}, tokenizer_->readDouble());", var_name = var_name)))
}
}
Some(IsNullable { is_nullable: false, content: Primitive::Boolean }) => {
if needs_block {
(Some(format!("bool {var_name};", var_name = var_name)),
- Some(format!("MOZ_TRY_VAR({var_name}, readBool());", var_name = var_name)))
+ Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readBool());", var_name = var_name)))
} else {
(None,
- Some(format!("MOZ_TRY_DECL({var_name}, readBool());", var_name = var_name)))
+ Some(format!("BINJS_MOZ_TRY_DECL({var_name}, tokenizer_->readBool());", var_name = var_name)))
+ }
+ }
+ Some(IsNullable { is_nullable: false, content: Primitive::Offset }) => {
+ if needs_block {
+ (Some(format!("uint32_t {var_name};", var_name = var_name)),
+ Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readOffset());", var_name = var_name)))
+ } else {
+ (None,
+ Some(format!("BINJS_MOZ_TRY_DECL({var_name}, tokenizer_->readOffset());", var_name = var_name)))
}
}
Some(IsNullable { content: Primitive::Void, .. }) => {
warn!("Internal error: We shouldn't have any `void` types at this stage.");
(Some(format!("// Skipping void field {}", field.name().to_str())),
None)
}
Some(IsNullable { is_nullable: false, content: Primitive::String }) => {
(Some(format!("RootedAtom {var_name}(cx_);", var_name = var_name)),
- Some(format!("MOZ_TRY(readString(&{var_name}));", var_name = var_name)))
+ Some(format!("MOZ_TRY_VAR({var_name}, tokenizer_->readAtom());", var_name = var_name)))
}
Some(IsNullable { content: Primitive::Interface(ref interface), ..})
if &self.get_type_ok(interface.name(), "?") == "Ok" =>
{
// Special case: `Ok` means that we shouldn't bind the return value.
let typename = TypeName::type_(field.type_());
(None,
Some(format!("MOZ_TRY(parse{typename}());",
@@ -948,17 +1031,17 @@ impl CPPExporter {
var_name = var_name,
typename = typename)),
Some(format!("MOZ_TRY_VAR({var_name}, parse{typename}());",
var_name = var_name,
typename = typename)
))
} else {
(None,
- Some(format!("MOZ_TRY_DECL({var_name}, parse{typename}());",
+ Some(format!("BINJS_MOZ_TRY_DECL({var_name}, parse{typename}());",
var_name = var_name,
typename = typename)))
}
}
};
let rendered = {
if rules_for_this_field.replace.is_some() {
@@ -1026,37 +1109,38 @@ impl CPPExporter {
}}
",
kind = kind.to_str(),
first_line = first_line,
));
} else {
let check_fields = if number_of_fields == 0 {
- format!("MOZ_TRY(checkFields0(kind, fields));")
+ format!("MOZ_TRY(tokenizer_->checkFields0(kind, fields));")
} else {
- format!("MOZ_TRY(checkFields(kind, fields, {fields_type_list}));",
+ format!("MOZ_TRY(tokenizer_->checkFields(kind, fields, {fields_type_list}));",
fields_type_list = fields_type_list)
};
buffer.push_str(&format!("{first_line}
{{
MOZ_ASSERT(kind == BinKind::{kind});
CheckRecursionLimit(cx_);
{check_fields}
{pre}{fields_implem}
-{post}return result;
+{post}
+ return result;
}}
",
check_fields = check_fields,
fields_implem = fields_implem,
pre = init,
post = build_result,
- kind = kind,
+ kind = name.to_cpp_enum_case(),
first_line = first_line,
));
}
}
/// Generate C++ code for SpiderMonkey
fn to_spidermonkey_cpp(&self) -> String {
let mut buffer = String::new();
@@ -1092,53 +1176,54 @@ impl CPPExporter {
// 3. String Enums
buffer.push_str("\n\n// ----- String enums (autogenerated, by lexicographical order)\n");
{
let string_enums_by_name = self.syntax.string_enums_by_name()
.iter()
.sorted_by(|a, b| str::cmp(a.0.to_str(), b.0.to_str()));
for (kind, enum_) in string_enums_by_name {
- let convert = format!("{cases}
-
- return raiseInvalidEnum(\"{kind}\", chars);",
+ let convert = format!(" switch (variant) {{
+{cases}
+ default:
+ return raiseInvalidVariant(\"{kind}\", variant);
+ }}",
kind = kind,
cases = enum_.strings()
.iter()
- .map(|string| {
- format!(" if (chars == \"{string}\")
- return {kind}::{variant};",
- string = string,
+ .map(|symbol| {
+ format!(" case BinVariant::{binvariant_variant}:
+ return {kind}::{specialized_variant};",
kind = kind,
- variant = string.to_cpp_enum_case()
+ specialized_variant = symbol.to_cpp_enum_case(),
+ binvariant_variant = self.variants_by_symbol.get(symbol)
+ .unwrap()
)
})
.format("\n")
);
let rendered_doc = format!("/*\nenum {kind} {{\n{cases}\n}};\n*/\n",
kind = kind,
cases = enum_.strings()
.iter()
.map(|s| format!(" \"{}\"", s))
.format(",\n")
);
buffer.push_str(&format!("{rendered_doc}{first_line}
{{
- // Unoptimized implementation.
- Chars chars(cx_);
- MOZ_TRY(readString(chars));
+ BINJS_MOZ_TRY_DECL(variant, tokenizer_->readVariant());
{convert}
}}
",
rendered_doc = rendered_doc,
convert = convert,
- first_line = self.get_method_definition_start(kind, &format!("BinASTParser::{kind}", kind = kind), "", "")
+ first_line = self.get_method_definition_start(kind, &format!("typename BinASTParser<Tok>::{kind}", kind = kind), "", "")
));
}
}
// 4. Lists
buffer.push_str("\n\n// ----- Lists (autogenerated, by lexicographical order)\n");
for parser in &self.list_parsers_to_generate {
self.generate_implement_list(&mut buffer, parser);
@@ -1211,18 +1296,18 @@ fn main() {
.expect("Could not read source");
println!("...parsing webidl");
let ast = webidl::parse_string(&source)
.expect("Could not parse source");
println!("...verifying grammar");
let mut builder = Importer::import(&ast);
- let fake_root = builder.node_name("");
- let null = builder.node_name("_Null");
+ let fake_root = builder.node_name("@@ROOT@@"); // Unused
+ let null = builder.node_name(""); // Used
builder.add_interface(&null)
.unwrap();
let syntax = builder.into_spec(SpecOptions {
root: &fake_root,
null: &null,
});
let deanonymizer = TypeDeanonymizer::new(&syntax);